diff --git a/.vscode/settings.json b/.vscode/settings.json index a5ecae51f0..33f2484297 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -30,7 +30,7 @@ "zig.initialSetupDone": true, "zig.buildOption": "build", "zig.zls.zigLibPath": "${workspaceFolder}/vendor/zig/lib", - "zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen"], + "zig.buildArgs": ["-Dgenerated-code=./build/debug/codegen", "--watch", "-fincremental"], "zig.zls.buildOnSaveStep": "check", // "zig.zls.enableBuildOnSave": true, // "zig.buildOnSave": true, diff --git a/build.zig b/build.zig index f3a61aef29..54fddd730d 100644 --- a/build.zig +++ b/build.zig @@ -333,6 +333,22 @@ pub fn build(b: *Build) !void { b.default_step.dependOn(step); } + // zig build watch + // const enable_watch_step = b.option(bool, "watch_step", "Enable the watch step. This reads more files so it is off by default") orelse false; + // if (no_llvm or enable_watch_step) { + // self_hosted_watch.selfHostedExeBuild(b, &build_options) catch @panic("OOM"); + // } + + // zig build check-debug + { + const step = b.step("check-debug", "Check for semantic analysis errors on some platforms"); + addMultiCheck(b, step, build_options, &.{ + .{ .os = .windows, .arch = .x86_64 }, + .{ .os = .mac, .arch = .aarch64 }, + .{ .os = .linux, .arch = .x86_64 }, + }, &.{.Debug}); + } + // zig build check-all { const step = b.step("check-all", "Check for semantic analysis errors on all supported platforms"); diff --git a/misctools/http_bench.zig b/misctools/http_bench.zig index 20d693e902..4fea0260fa 100644 --- a/misctools/http_bench.zig +++ b/misctools/http_bench.zig @@ -12,7 +12,6 @@ const C = bun.C; const clap = @import("../src/deps/zig-clap/clap.zig"); const URL = @import("../src/url.zig").URL; -const Headers = @import("../src/bun.js/webcore/response.zig").Headers; const Method = @import("../src/http/method.zig").Method; const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType; const HeadersTuple = ColonListType(string, noop_resolver); diff --git a/package.json b/package.json index bc55758c49..d3553805b9 100644 --- a/package.json +++ b/package.json @@ -31,7 +31,7 @@ }, "scripts": { "build": "bun run build:debug", - "watch": "bun zig build check --watch", + "watch": "bun zig build check --watch -fincremental --prominent-compile-errors", "bd": "(bun run --silent build:debug &> /tmp/bun.debug.build.log || (cat /tmp/bun.debug.build.log && rm -rf /tmp/bun.debug.build.log && exit 1)) && rm -f /tmp/bun.debug.build.log && ./build/debug/bun-debug", "build:debug": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -B build/debug", "build:valgrind": "bun ./scripts/build.mjs -GNinja -DCMAKE_BUILD_TYPE=Debug -DENABLE_BASELINE=ON -ENABLE_VALGRIND=ON -B build/debug-valgrind", diff --git a/scripts/pack-codegen-for-zig-team.sh b/scripts/pack-codegen-for-zig-team.sh index 45d6775b2f..5de4f0bd2e 100755 --- a/scripts/pack-codegen-for-zig-team.sh +++ b/scripts/pack-codegen-for-zig-team.sh @@ -5,8 +5,10 @@ if ! 
test -d build/debug/codegen; then fi out="codegen-for-zig-team.tar.gz" -tar -zcf "$out" \ +tar --no-xattrs \ + --exclude=".DS_Store" \ + -zcf "$out" \ build/debug/codegen \ src/bun.js/bindings/GeneratedBindings.zig \ - src/bun.js/bindings/GeneratedJS2Native.zig + src/bun.js/bindings/GeneratedJS2Native.zig echo "-> $out" diff --git a/src/Global.zig b/src/Global.zig index 221708b3bd..210b02ee02 100644 --- a/src/Global.zig +++ b/src/Global.zig @@ -123,7 +123,10 @@ pub fn exit(code: u32) noreturn { Bun__onExit(); std.os.windows.kernel32.ExitProcess(code); }, - else => bun.C.quick_exit(@bitCast(code)), + else => { + bun.c.quick_exit(@bitCast(code)); + std.c.abort(); // quick_exit should be noreturn + }, } } diff --git a/src/OutputFile.zig b/src/OutputFile.zig index c8a87cf6dc..00345593e8 100644 --- a/src/OutputFile.zig +++ b/src/OutputFile.zig @@ -128,7 +128,7 @@ pub const SavedFile = struct { const store = JSC.WebCore.Blob.Store.initFile( JSC.Node.PathOrFileDescriptor{ .path = JSC.Node.PathLike{ - .string = JSC.PathString.init(path), + .string = bun.PathString.init(path), }, }, mime_type, @@ -251,7 +251,7 @@ pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u } var path_buf: bun.PathBuffer = undefined; - _ = try JSC.Node.NodeFS.writeFileWithPathBuffer(&path_buf, .{ + _ = try JSC.Node.fs.NodeFS.writeFileWithPathBuffer(&path_buf, .{ .data = .{ .buffer = .{ .buffer = .{ .ptr = @constCast(value.bytes.ptr), @@ -263,7 +263,7 @@ pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u .mode = if (f.is_executable) 0o755 else 0o644, .dirfd = .fromStdDir(root_dir), .file = .{ .path = .{ - .string = JSC.PathString.init(rel_path), + .string = bun.PathString.init(rel_path), } }, }).unwrap(); }, @@ -278,7 +278,7 @@ pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u } pub fn moveTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: FileDescriptorType) !void { - try bun.C.moveFileZ(file.value.move.dir, bun.sliceTo(&(try std.posix.toPosixPath(file.value.move.getPathname())), 0), dir, bun.sliceTo(&(try std.posix.toPosixPath(rel_path)), 0)); + try bun.sys.moveFileZ(file.value.move.dir, bun.sliceTo(&(try std.posix.toPosixPath(file.value.move.getPathname())), 0), dir, bun.sliceTo(&(try std.posix.toPosixPath(rel_path)), 0)); } pub fn copyTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: FileDescriptorType) !void { diff --git a/src/StandaloneModuleGraph.zig b/src/StandaloneModuleGraph.zig index 4148c57793..267d1f83f1 100644 --- a/src/StandaloneModuleGraph.zig +++ b/src/StandaloneModuleGraph.zig @@ -136,7 +136,7 @@ pub const StandaloneModuleGraph = struct { loader: bun.options.Loader, contents: [:0]const u8 = "", sourcemap: LazySourceMap, - cached_blob: ?*bun.JSC.WebCore.Blob = null, + cached_blob: ?*bun.webcore.Blob = null, encoding: Encoding = .binary, wtf_string: bun.String = bun.String.empty, bytecode: []u8 = "", @@ -171,13 +171,13 @@ pub const StandaloneModuleGraph = struct { return this.wtf_string.dupeRef(); } - pub fn blob(this: *File, globalObject: *bun.JSC.JSGlobalObject) *bun.JSC.WebCore.Blob { + pub fn blob(this: *File, globalObject: *bun.JSC.JSGlobalObject) *bun.webcore.Blob { if (this.cached_blob == null) { - const store = bun.JSC.WebCore.Blob.Store.init(@constCast(this.contents), bun.default_allocator); + const store = bun.webcore.Blob.Store.init(@constCast(this.contents), bun.default_allocator); // make it never free store.ref(); - const b = bun.JSC.WebCore.Blob.initWithStore(store, 
globalObject).new(); + const b = bun.webcore.Blob.initWithStore(store, globalObject).new(); b.allocator = bun.default_allocator; if (bun.http.MimeType.byExtensionNoDefault(bun.strings.trimLeadingChar(std.fs.path.extension(this.name), '.'))) |mime| { @@ -659,7 +659,7 @@ pub const StandaloneModuleGraph = struct { Global.exit(1); }; if (comptime !Environment.isWindows) { - _ = bun.C.fchmod(cloned_executable_fd.native(), 0o777); + _ = bun.c.fchmod(cloned_executable_fd.native(), 0o777); } return cloned_executable_fd; }, @@ -727,7 +727,7 @@ pub const StandaloneModuleGraph = struct { // the final 8 bytes in the file are the length of the module graph with padding, excluding the trailer and offsets _ = Syscall.write(cloned_executable_fd, std.mem.asBytes(&total_byte_count)); if (comptime !Environment.isWindows) { - _ = bun.C.fchmod(cloned_executable_fd.native(), 0o777); + _ = bun.c.fchmod(cloned_executable_fd.native(), 0o777); } return cloned_executable_fd; @@ -803,14 +803,14 @@ pub const StandaloneModuleGraph = struct { break :brk outfile_buf_u16[0..outfile_w.len :0]; }; - bun.C.moveOpenedFileAtLoose(fd, .fromStdDir(root_dir), outfile_slice, true).unwrap() catch |err| { + bun.windows.moveOpenedFileAtLoose(fd, .fromStdDir(root_dir), outfile_slice, true).unwrap() catch |err| { if (err == error.EISDIR) { Output.errGeneric("{} is a directory. Please choose a different --outfile or delete the directory", .{bun.fmt.utf16(outfile_slice)}); } else { Output.err(err, "failed to move executable to result path", .{}); } - _ = bun.C.deleteOpenedFile(fd); + _ = bun.windows.deleteOpenedFile(fd); Global.exit(1); }; @@ -832,7 +832,7 @@ pub const StandaloneModuleGraph = struct { Global.exit(1); }; - bun.C.moveFileZWithHandle( + bun.sys.moveFileZWithHandle( fd, bun.FD.cwd(), bun.sliceTo(&(try std.posix.toPosixPath(temp_location)), 0), diff --git a/src/allocators/MemoryReportingAllocator.zig b/src/allocators/MemoryReportingAllocator.zig new file mode 100644 index 0000000000..319c243ea9 --- /dev/null +++ b/src/allocators/MemoryReportingAllocator.zig @@ -0,0 +1,90 @@ +const MemoryReportingAllocator = @This(); +const log = bun.Output.scoped(.MEM, false); + +child_allocator: std.mem.Allocator, +memory_cost: std.atomic.Value(usize) = std.atomic.Value(usize).init(0), + +fn alloc(context: *anyopaque, n: usize, alignment: std.mem.Alignment, return_address: usize) ?[*]u8 { + const this: *MemoryReportingAllocator = @alignCast(@ptrCast(context)); + const result = this.child_allocator.rawAlloc(n, alignment, return_address) orelse return null; + _ = this.memory_cost.fetchAdd(n, .monotonic); + if (comptime Environment.allow_assert) + log("malloc({d}) = {d}", .{ n, this.memory_cost.raw }); + return result; +} + +pub fn discard(this: *MemoryReportingAllocator, buf: []const u8) void { + _ = this.memory_cost.fetchSub(buf.len, .monotonic); + if (comptime Environment.allow_assert) + log("discard({d}) = {d}", .{ buf.len, this.memory_cost.raw }); +} + +fn resize(context: *anyopaque, buf: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) bool { + const this: *MemoryReportingAllocator = @alignCast(@ptrCast(context)); + if (this.child_allocator.rawResize(buf, alignment, new_len, ret_addr)) { + _ = this.memory_cost.fetchAdd(new_len -| buf.len, .monotonic); + if (comptime Environment.allow_assert) + log("resize() = {d}", .{this.memory_cost.raw}); + return true; + } else { + return false; + } +} + +fn free(context: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ret_addr: usize) void { + const this: 
*MemoryReportingAllocator = @alignCast(@ptrCast(context)); + this.child_allocator.rawFree(buf, alignment, ret_addr); + + if (comptime Environment.allow_assert) { + _ = this.memory_cost.fetchSub(buf.len, .monotonic); + log("free({d}) = {d}", .{ buf.len, this.memory_cost.raw }); + } +} + +pub fn wrap(this: *MemoryReportingAllocator, allocator_: std.mem.Allocator) std.mem.Allocator { + this.* = .{ + .child_allocator = allocator_, + }; + + return this.allocator(); +} + +pub fn allocator(this: *MemoryReportingAllocator) std.mem.Allocator { + return std.mem.Allocator{ + .ptr = this, + .vtable = &MemoryReportingAllocator.VTable, + }; +} + +pub fn report(this: *MemoryReportingAllocator, vm: *jsc.VM) void { + const mem = this.memory_cost.load(.monotonic); + if (mem > 0) { + vm.reportExtraMemory(mem); + if (comptime Environment.allow_assert) + log("report({d})", .{mem}); + } +} + +pub inline fn assert(this: *const MemoryReportingAllocator) void { + if (comptime !Environment.allow_assert) { + return; + } + + const memory_cost = this.memory_cost.load(.monotonic); + if (memory_cost > 0) { + Output.panic("MemoryReportingAllocator still has {d} bytes allocated", .{memory_cost}); + } +} + +pub const VTable = std.mem.Allocator.VTable{ + .alloc = &alloc, + .resize = &resize, + .remap = &std.mem.Allocator.noRemap, + .free = &free, +}; + +const std = @import("std"); +const bun = @import("bun"); +const jsc = bun.jsc; +const Environment = bun.Environment; +const Output = bun.Output; diff --git a/src/allocators/linux_memfd_allocator.zig b/src/allocators/linux_memfd_allocator.zig index f3823a11a9..cfc14e13d6 100644 --- a/src/allocators/linux_memfd_allocator.zig +++ b/src/allocators/linux_memfd_allocator.zig @@ -76,7 +76,7 @@ pub const LinuxMemFdAllocator = struct { }; }; - pub fn alloc(this: *LinuxMemFdAllocator, len: usize, offset: usize, flags: std.posix.MAP) bun.JSC.Maybe(bun.JSC.WebCore.Blob.ByteStore) { + pub fn alloc(this: *LinuxMemFdAllocator, len: usize, offset: usize, flags: std.posix.MAP) bun.JSC.Maybe(bun.webcore.Blob.Store.Bytes) { var size = len; // size rounded up to nearest page @@ -95,7 +95,7 @@ pub const LinuxMemFdAllocator = struct { )) { .result => |slice| { return .{ - .result = bun.JSC.WebCore.Blob.ByteStore{ + .result = bun.webcore.Blob.Store.Bytes{ .cap = @truncate(slice.len), .ptr = slice.ptr, .len = @truncate(len), @@ -123,7 +123,7 @@ pub const LinuxMemFdAllocator = struct { return bytes.len >= 1024 * 1024 * 8; } - pub fn create(bytes: []const u8) bun.JSC.Maybe(bun.JSC.WebCore.Blob.ByteStore) { + pub fn create(bytes: []const u8) bun.JSC.Maybe(bun.webcore.Blob.Store.Bytes) { if (comptime !bun.Environment.isLinux) { unreachable; } diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig index a876548c0f..88de98944e 100644 --- a/src/analytics/analytics_thread.zig +++ b/src/analytics/analytics_thread.zig @@ -8,7 +8,6 @@ const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const FeatureFlags = bun.FeatureFlags; -const C = bun.C; const sync = @import("../sync.zig"); const std = @import("std"); @@ -79,7 +78,7 @@ pub fn isCI() bool { /// This answers, "What parts of bun are people actually using?" 
pub const Features = struct { - pub var builtin_modules = std.enums.EnumSet(bun.JSC.HardcodedModule).initEmpty(); + pub var builtin_modules = std.enums.EnumSet(bun.jsc.ModuleLoader.HardcodedModule).initEmpty(); pub var @"Bun.stderr": usize = 0; pub var @"Bun.stdin": usize = 0; diff --git a/src/async/posix_event_loop.zig b/src/async/posix_event_loop.zig index 470830bf4f..5ccbae9e78 100644 --- a/src/async/posix_event_loop.zig +++ b/src/async/posix_event_loop.zig @@ -157,9 +157,9 @@ pub const FilePoll = struct { const StaticPipeWriter = Subprocess.StaticPipeWriter.Poll; const ShellStaticPipeWriter = bun.shell.ShellSubprocess.StaticPipeWriter.Poll; const FileSink = JSC.WebCore.FileSink.Poll; - const DNSResolver = JSC.DNS.DNSResolver; - const GetAddrInfoRequest = JSC.DNS.GetAddrInfoRequest; - const Request = JSC.DNS.InternalDNS.Request; + const DNSResolver = bun.api.DNS.DNSResolver; + const GetAddrInfoRequest = bun.api.DNS.GetAddrInfoRequest; + const Request = bun.api.DNS.InternalDNS.Request; const LifecycleScriptSubprocessOutputReader = bun.install.LifecycleScriptSubprocess.OutputReader; const BufferedReader = bun.io.BufferedReader; @@ -904,7 +904,7 @@ pub const FilePoll = struct { &timeout, ); - if (bun.C.getErrno(rc) == .INTR) continue; + if (bun.sys.getErrno(rc) == .INTR) continue; break :rc rc; } }; @@ -921,7 +921,7 @@ pub const FilePoll = struct { // indicate the error condition. } - const errno = bun.C.getErrno(rc); + const errno = bun.sys.getErrno(rc); if (errno != .SUCCESS) { this.deactivate(loop); @@ -1073,7 +1073,7 @@ pub const FilePoll = struct { // indicate the error condition. } - const errno = bun.C.getErrno(rc); + const errno = bun.sys.getErrno(rc); switch (rc) { std.math.minInt(@TypeOf(rc))...-1 => return JSC.Maybe(void).errnoSys(@intFromEnum(errno), .kevent).?, else => {}, diff --git a/src/bake/DevServer.zig b/src/bake/DevServer.zig index 736fc56e85..16204be37c 100644 --- a/src/bake/DevServer.zig +++ b/src/bake/DevServer.zig @@ -7,7 +7,7 @@ //! //! All work is held in-memory, using manually managed data-oriented design. //! 
For questions about DevServer, please consult the delusional @paperclover -pub const DevServer = @This(); +const DevServer = @This(); pub const debug = bun.Output.Scoped(.DevServer, false); pub const igLog = bun.Output.scoped(.IncrementalGraph, false); const DebugHTTPServer = @import("../bun.js/api/server.zig").DebugHTTPServer; @@ -5612,10 +5612,10 @@ pub const SerializedFailure = struct { // // // } // if (value.jsType() == .DOMWrapper) { - // if (value.as(JSC.BuildMessage)) |build_error| { + // if (value.as(bun.api.BuildMessage)) |build_error| { // _ = build_error; // autofix // // - // } else if (value.as(JSC.ResolveMessage)) |resolve_error| { + // } else if (value.as(bun.api.ResolveMessage)) |resolve_error| { // _ = resolve_error; // autofix // @panic("TODO"); // } @@ -7644,7 +7644,7 @@ const ThreadlocalArena = @import("../allocators/mimalloc_arena.zig").Arena; const Watcher = bun.Watcher; const StaticRoute = bun.server.StaticRoute; -const AnyBlob = JSC.WebCore.AnyBlob; +const AnyBlob = JSC.WebCore.Blob.Any; const SourceMap = bun.sourcemap; const VLQ = SourceMap.VLQ; diff --git a/src/bake/production.zig b/src/bake/production.zig index f886753780..c38b661562 100644 --- a/src/bake/production.zig +++ b/src/bake/production.zig @@ -633,7 +633,7 @@ pub export fn BakeProdResolve(global: *JSC.JSGlobalObject, a_str: bun.String, sp const specifier = specifier_str.toUTF8(alloc); defer specifier.deinit(); - if (JSC.HardcodedModule.Alias.get(specifier.slice(), .bun)) |alias| { + if (JSC.ModuleLoader.HardcodedModule.Alias.get(specifier.slice(), .bun)) |alias| { return bun.String.static(alias.path); } diff --git a/src/bindgen.zig b/src/bindgen.zig deleted file mode 100644 index 5540f0a554..0000000000 --- a/src/bindgen.zig +++ /dev/null @@ -1,9 +0,0 @@ -pub const bindgen = true; - -pub const main = @import("./bun.js/bindings/bindings-generator.zig").main; -pub export fn PLCrashReportHandler(_: ?*anyopaque) void {} -pub export fn mkdirp(_: ?*anyopaque) void {} -pub const build_options = @import("build_options"); -pub const bun = @import("./BunObject.zig"); -pub const JavaScriptCore = @import("./jsc.zig"); -pub const C = @import("./c.zig"); diff --git a/src/bit_set.zig b/src/bit_set.zig index 21c283a8b8..f501faa32f 100644 --- a/src/bit_set.zig +++ b/src/bit_set.zig @@ -83,7 +83,7 @@ pub fn IntegerBitSet(comptime size: u16) type { } /// Returns the number of bits in this bit set - pub inline fn capacity(self: Self) usize { + pub fn capacity(self: Self) callconv(bun.callconv_inline) usize { _ = self; return bit_length; } @@ -408,7 +408,7 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { } /// Returns the number of bits in this bit set - pub inline fn capacity(self: Self) usize { + pub fn capacity(self: Self) callconv(bun.callconv_inline) usize { _ = self; return bit_length; } @@ -669,13 +669,13 @@ pub fn ArrayBitSet(comptime MaskIntType: type, comptime size: usize) type { return BitSetIterator(MaskInt, options); } - inline fn maskBit(index: usize) MaskInt { + fn maskBit(index: usize) callconv(bun.callconv_inline) MaskInt { return @as(MaskInt, 1) << @as(ShiftInt, @truncate(index)); } - inline fn maskIndex(index: usize) usize { + fn maskIndex(index: usize) callconv(bun.callconv_inline) usize { return index >> @bitSizeOf(ShiftInt); } - inline fn boolMaskBit(index: usize, value: bool) MaskInt { + fn boolMaskBit(index: usize, value: bool) callconv(bun.callconv_inline) MaskInt { return @as(MaskInt, @intFromBool(value)) << @as(ShiftInt, @intCast(index)); } }; @@ -868,7 +868,7 @@ pub 
const DynamicBitSetUnmanaged = struct { } /// Returns the number of bits in this bit set - pub inline fn capacity(self: Self) usize { + pub fn capacity(self: Self) callconv(bun.callconv_inline) usize { return self.bit_length; } @@ -1191,7 +1191,7 @@ pub const AutoBitSet = union(enum) { static: Static, dynamic: DynamicBitSetUnmanaged, - pub inline fn needsDynamic(bit_length: usize) bool { + pub fn needsDynamic(bit_length: usize) callconv(bun.callconv_inline) bool { return bit_length > Static.bit_length; } @@ -1351,7 +1351,7 @@ pub const DynamicBitSet = struct { } /// Returns the number of bits in this bit set - pub inline fn capacity(self: Self) usize { + pub fn capacity(self: Self) callconv(bun.callconv_inline) usize { return self.unmanaged.capacity(); } @@ -1543,7 +1543,7 @@ pub fn BitSetIterator(comptime MaskInt: type, comptime options: IteratorOptions) // isn't a next word. If the next word is the // last word, mask off the padding bits so we // don't visit them. - inline fn nextWord(self: *Self, comptime is_first_word: bool) void { + fn nextWord(self: *Self, comptime is_first_word: bool) callconv(bun.callconv_inline) void { var word = switch (direction) { .forward => self.words_remain[0], .reverse => self.words_remain[self.words_remain.len - 1], diff --git a/src/btjs.zig b/src/btjs.zig index 07ff6fcd52..b07aa38dc8 100644 --- a/src/btjs.zig +++ b/src/btjs.zig @@ -94,7 +94,7 @@ fn printSourceAtAddress(debug_info: *std.debug.SelfInfo, out_stream: anytype, ad const frame: *const bun.JSC.CallFrame = @ptrFromInt(fp); if (do_llint) { - const srcloc = frame.getCallerSrcLoc(bun.JSC.Bun__getVM().global); + const srcloc = frame.getCallerSrcLoc(bun.JSC.VirtualMachine.get().global); try tty_config.setColor(out_stream, .bold); try out_stream.print("{s}:{d}:{d}: ", .{ srcloc.str, srcloc.line, srcloc.column }); try tty_config.setColor(out_stream, .reset); diff --git a/src/bun.js/ConsoleObject.zig b/src/bun.js/ConsoleObject.zig index d1dd76ea4b..98fb8f10dd 100644 --- a/src/bun.js/ConsoleObject.zig +++ b/src/bun.js/ConsoleObject.zig @@ -2571,7 +2571,7 @@ pub const Formatter = struct { } else if (value.as(JSC.WebCore.S3Client)) |s3client| { s3client.writeFormat(ConsoleObject.Formatter, this, writer_, enable_ansi_colors) catch {}; return; - } else if (value.as(JSC.FetchHeaders) != null) { + } else if (value.as(bun.webcore.FetchHeaders) != null) { if (value.get_unsafe(this.globalThis, "toJSON")) |toJSONFunction| { this.addForNewLine("Headers ".len); writer.writeAll(comptime Output.prettyFmt("Headers ", enable_ansi_colors)); @@ -2608,7 +2608,7 @@ pub const Formatter = struct { // this case should never happen return try this.printAs(.Undefined, Writer, writer_, .undefined, .Cell, enable_ansi_colors); - } else if (value.as(JSC.API.Bun.Timer.TimeoutObject)) |timer| { + } else if (value.as(bun.api.Timer.TimeoutObject)) |timer| { this.addForNewLine("Timeout(# ) ".len + bun.fmt.fastDigitCount(@as(u64, @intCast(@max(timer.internals.id, 0))))); if (timer.internals.flags.kind == .setInterval) { this.addForNewLine("repeats ".len + bun.fmt.fastDigitCount(@as(u64, @intCast(@max(timer.internals.id, 0))))); @@ -2622,17 +2622,17 @@ pub const Formatter = struct { } return; - } else if (value.as(JSC.API.Bun.Timer.ImmediateObject)) |immediate| { + } else if (value.as(bun.api.Timer.ImmediateObject)) |immediate| { this.addForNewLine("Immediate(# ) ".len + bun.fmt.fastDigitCount(@as(u64, @intCast(@max(immediate.internals.id, 0))))); writer.print(comptime Output.prettyFmt("Immediate (#{d})", enable_ansi_colors), .{ 
immediate.internals.id, }); return; - } else if (value.as(JSC.BuildMessage)) |build_log| { + } else if (value.as(bun.api.BuildMessage)) |build_log| { build_log.msg.writeFormat(writer_, enable_ansi_colors) catch {}; return; - } else if (value.as(JSC.ResolveMessage)) |resolve_log| { + } else if (value.as(bun.api.ResolveMessage)) |resolve_log| { resolve_log.msg.writeFormat(writer_, enable_ansi_colors) catch {}; return; } else if (JestPrettyFormat.printAsymmetricMatcher(this, Format, &writer, writer_, name_buf, value, enable_ansi_colors)) { diff --git a/src/bun.js/Debugger.zig b/src/bun.js/Debugger.zig new file mode 100644 index 0000000000..af9bfdb507 --- /dev/null +++ b/src/bun.js/Debugger.zig @@ -0,0 +1,424 @@ +path_or_port: ?[]const u8 = null, +from_environment_variable: []const u8 = "", +script_execution_context_id: u32 = 0, +next_debugger_id: u64 = 1, +poll_ref: bun.Async.KeepAlive = .{}, +wait_for_connection: Wait = .off, +// wait_for_connection: bool = false, +set_breakpoint_on_first_line: bool = false, +mode: enum { + /// Bun acts as the server. https://debug.bun.sh/ uses this + listen, + /// Bun connects to this path. The VSCode extension uses this. + connect, +} = .listen, + +test_reporter_agent: TestReporterAgent = .{}, +lifecycle_reporter_agent: LifecycleAgent = .{}, +must_block_until_connected: bool = false, + +pub const Wait = enum { off, shortly, forever }; + +pub const log = Output.scoped(.debugger, false); + +extern "c" fn Bun__createJSDebugger(*JSGlobalObject) u32; +extern "c" fn Bun__ensureDebugger(u32, bool) void; +extern "c" fn Bun__startJSDebuggerThread(*JSGlobalObject, u32, *bun.String, c_int, bool) void; +var futex_atomic: std.atomic.Value(u32) = undefined; + +pub fn waitForDebuggerIfNecessary(this: *VirtualMachine) void { + const debugger = &(this.debugger orelse return); + if (!debugger.must_block_until_connected) { + return; + } + defer debugger.must_block_until_connected = false; + + Debugger.log("spin", .{}); + while (futex_atomic.load(.monotonic) > 0) { + bun.Futex.waitForever(&futex_atomic, 1); + } + if (comptime Environment.enable_logs) + Debugger.log("waitForDebugger: {}", .{Output.ElapsedFormatter{ + .colors = Output.enable_ansi_colors_stderr, + .duration_ns = @truncate(@as(u128, @intCast(std.time.nanoTimestamp() - bun.CLI.start_time))), + }}); + + Bun__ensureDebugger(debugger.script_execution_context_id, debugger.wait_for_connection != .off); + + // Sleep up to 30ms for automatic inspection. + const wait_for_connection_delay_ms = 30; + + var deadline: bun.timespec = if (debugger.wait_for_connection == .shortly) bun.timespec.now().addMs(wait_for_connection_delay_ms) else undefined; + + if (comptime Environment.isWindows) { + // TODO: remove this when tickWithTimeout actually works properly on Windows. 
+ if (debugger.wait_for_connection == .shortly) { + uv.uv_update_time(this.uvLoop()); + var timer = bun.default_allocator.create(uv.Timer) catch bun.outOfMemory(); + timer.* = std.mem.zeroes(uv.Timer); + timer.init(this.uvLoop()); + const onDebuggerTimer = struct { + fn call(handle: *uv.Timer) callconv(.C) void { + const vm = VirtualMachine.get(); + vm.debugger.?.poll_ref.unref(vm); + uv.uv_close(@ptrCast(handle), deinitTimer); + } + + fn deinitTimer(handle: *anyopaque) callconv(.C) void { + bun.default_allocator.destroy(@as(*uv.Timer, @alignCast(@ptrCast(handle)))); + } + }.call; + timer.start(wait_for_connection_delay_ms, 0, &onDebuggerTimer); + timer.ref(); + } + } + + while (debugger.wait_for_connection != .off) { + this.eventLoop().tick(); + switch (debugger.wait_for_connection) { + .forever => { + this.eventLoop().autoTickActive(); + + if (comptime Environment.enable_logs) + log("waited: {}", .{std.fmt.fmtDuration(@intCast(@as(i64, @truncate(std.time.nanoTimestamp() - bun.CLI.start_time))))}); + }, + .shortly => { + // Handle .incrementRefConcurrently + if (comptime Environment.isPosix) { + const pending_unref = this.pending_unref_counter; + if (pending_unref > 0) { + this.pending_unref_counter = 0; + this.uwsLoop().unrefCount(pending_unref); + } + } + + this.uwsLoop().tickWithTimeout(&deadline); + + if (comptime Environment.enable_logs) + log("waited: {}", .{std.fmt.fmtDuration(@intCast(@as(i64, @truncate(std.time.nanoTimestamp() - bun.CLI.start_time))))}); + + const elapsed = bun.timespec.now(); + if (elapsed.order(&deadline) != .lt) { + debugger.poll_ref.unref(this); + log("Timed out waiting for the debugger", .{}); + break; + } + }, + .off => { + break; + }, + } + } +} + +pub var has_created_debugger: bool = false; +pub fn create(this: *VirtualMachine, globalObject: *JSGlobalObject) !void { + log("create", .{}); + jsc.markBinding(@src()); + if (!has_created_debugger) { + has_created_debugger = true; + std.mem.doNotOptimizeAway(&TestReporterAgent.Bun__TestReporterAgentDisable); + std.mem.doNotOptimizeAway(&LifecycleAgent.Bun__LifecycleAgentDisable); + std.mem.doNotOptimizeAway(&TestReporterAgent.Bun__TestReporterAgentEnable); + std.mem.doNotOptimizeAway(&LifecycleAgent.Bun__LifecycleAgentEnable); + var debugger = &this.debugger.?; + debugger.script_execution_context_id = Bun__createJSDebugger(globalObject); + if (!this.has_started_debugger) { + this.has_started_debugger = true; + futex_atomic = std.atomic.Value(u32).init(0); + var thread = try std.Thread.spawn(.{}, startJSDebuggerThread, .{this}); + thread.detach(); + } + this.eventLoop().ensureWaker(); + + if (debugger.wait_for_connection != .off) { + debugger.poll_ref.ref(this); + debugger.must_block_until_connected = true; + } + } +} + +pub fn startJSDebuggerThread(other_vm: *VirtualMachine) void { + var arena = bun.MimallocArena.init() catch unreachable; + Output.Source.configureNamedThread("Debugger"); + log("startJSDebuggerThread", .{}); + jsc.markBinding(@src()); + + var vm = VirtualMachine.init(.{ + .allocator = arena.allocator(), + .args = std.mem.zeroes(bun.Schema.Api.TransformOptions), + .store_fd = false, + }) catch @panic("Failed to create Debugger VM"); + vm.allocator = arena.allocator(); + vm.arena = &arena; + + vm.transpiler.configureDefines() catch @panic("Failed to configure defines"); + vm.is_main_thread = false; + vm.eventLoop().ensureWaker(); + + const callback = jsc.OpaqueWrap(VirtualMachine, start); + vm.global.vm().holdAPILock(other_vm, callback); +} + +pub export fn Debugger__didConnect() void { + var this 
= VirtualMachine.get(); + if (this.debugger.?.wait_for_connection != .off) { + this.debugger.?.wait_for_connection = .off; + this.debugger.?.poll_ref.unref(this); + } +} + +fn start(other_vm: *VirtualMachine) void { + jsc.markBinding(@src()); + + var this = VirtualMachine.get(); + const debugger = other_vm.debugger.?; + const loop = this.eventLoop(); + + if (debugger.from_environment_variable.len > 0) { + var url = bun.String.createUTF8(debugger.from_environment_variable); + + loop.enter(); + defer loop.exit(); + Bun__startJSDebuggerThread(this.global, debugger.script_execution_context_id, &url, 1, debugger.mode == .connect); + } + + if (debugger.path_or_port) |path_or_port| { + var url = bun.String.createUTF8(path_or_port); + + loop.enter(); + defer loop.exit(); + Bun__startJSDebuggerThread(this.global, debugger.script_execution_context_id, &url, 0, debugger.mode == .connect); + } + + this.global.handleRejectedPromises(); + + if (this.log.msgs.items.len > 0) { + this.log.print(Output.errorWriter()) catch {}; + Output.prettyErrorln("\n", .{}); + Output.flush(); + } + + log("wake", .{}); + futex_atomic.store(0, .monotonic); + bun.Futex.wake(&futex_atomic, 1); + + other_vm.eventLoop().wakeup(); + + this.eventLoop().tick(); + + other_vm.eventLoop().wakeup(); + + while (true) { + while (this.isEventLoopAlive()) { + this.tick(); + this.eventLoop().autoTickActive(); + } + + this.eventLoop().tickPossiblyForever(); + } +} + +pub const AsyncTaskTracker = struct { + id: u64, + + pub fn init(vm: *VirtualMachine) AsyncTaskTracker { + return .{ .id = vm.nextAsyncTaskID() }; + } + + pub fn didSchedule(this: AsyncTaskTracker, globalObject: *JSGlobalObject) void { + if (this.id == 0) return; + + didScheduleAsyncCall(globalObject, AsyncCallType.EventListener, this.id, true); + } + + pub fn didCancel(this: AsyncTaskTracker, globalObject: *JSGlobalObject) void { + if (this.id == 0) return; + + didCancelAsyncCall(globalObject, AsyncCallType.EventListener, this.id); + } + + pub fn willDispatch(this: AsyncTaskTracker, globalObject: *JSGlobalObject) void { + if (this.id == 0) { + return; + } + + willDispatchAsyncCall(globalObject, AsyncCallType.EventListener, this.id); + } + + pub fn didDispatch(this: AsyncTaskTracker, globalObject: *JSGlobalObject) void { + if (this.id == 0) { + return; + } + + didDispatchAsyncCall(globalObject, AsyncCallType.EventListener, this.id); + } +}; + +pub const AsyncCallType = enum(u8) { + DOMTimer = 1, + EventListener = 2, + PostMessage = 3, + RequestAnimationFrame = 4, + Microtask = 5, +}; +extern fn Debugger__didScheduleAsyncCall(*JSGlobalObject, AsyncCallType, u64, bool) void; +extern fn Debugger__didCancelAsyncCall(*JSGlobalObject, AsyncCallType, u64) void; +extern fn Debugger__didDispatchAsyncCall(*JSGlobalObject, AsyncCallType, u64) void; +extern fn Debugger__willDispatchAsyncCall(*JSGlobalObject, AsyncCallType, u64) void; + +pub fn didScheduleAsyncCall(globalObject: *JSGlobalObject, call: AsyncCallType, id: u64, single_shot: bool) void { + jsc.markBinding(@src()); + Debugger__didScheduleAsyncCall(globalObject, call, id, single_shot); +} +pub fn didCancelAsyncCall(globalObject: *JSGlobalObject, call: AsyncCallType, id: u64) void { + jsc.markBinding(@src()); + Debugger__didCancelAsyncCall(globalObject, call, id); +} +pub fn didDispatchAsyncCall(globalObject: *JSGlobalObject, call: AsyncCallType, id: u64) void { + jsc.markBinding(@src()); + Debugger__didDispatchAsyncCall(globalObject, call, id); +} +pub fn willDispatchAsyncCall(globalObject: *JSGlobalObject, call: AsyncCallType, 
id: u64) void { + jsc.markBinding(@src()); + Debugger__willDispatchAsyncCall(globalObject, call, id); +} + +pub const TestReporterAgent = struct { + handle: ?*Handle = null, + const debug = Output.scoped(.TestReporterAgent, false); + + pub const TestStatus = enum(u8) { + pass, + fail, + timeout, + skip, + todo, + }; + pub const Handle = opaque { + extern "c" fn Bun__TestReporterAgentReportTestFound(agent: *Handle, callFrame: *jsc.CallFrame, testId: c_int, name: *bun.String) void; + extern "c" fn Bun__TestReporterAgentReportTestStart(agent: *Handle, testId: c_int) void; + extern "c" fn Bun__TestReporterAgentReportTestEnd(agent: *Handle, testId: c_int, bunTestStatus: TestStatus, elapsed: f64) void; + + pub fn reportTestFound(this: *Handle, callFrame: *jsc.CallFrame, testId: i32, name: *bun.String) void { + Bun__TestReporterAgentReportTestFound(this, callFrame, testId, name); + } + + pub fn reportTestStart(this: *Handle, testId: c_int) void { + Bun__TestReporterAgentReportTestStart(this, testId); + } + + pub fn reportTestEnd(this: *Handle, testId: c_int, bunTestStatus: TestStatus, elapsed: f64) void { + Bun__TestReporterAgentReportTestEnd(this, testId, bunTestStatus, elapsed); + } + }; + pub export fn Bun__TestReporterAgentEnable(agent: *Handle) void { + if (VirtualMachine.get().debugger) |*debugger| { + debug("enable", .{}); + debugger.test_reporter_agent.handle = agent; + } + } + pub export fn Bun__TestReporterAgentDisable(_: *Handle) void { + if (VirtualMachine.get().debugger) |*debugger| { + debug("disable", .{}); + debugger.test_reporter_agent.handle = null; + } + } + + /// Caller must ensure that it is enabled first. + /// + /// Since we may have to call .deinit on the name string. + pub fn reportTestFound(this: TestReporterAgent, callFrame: *jsc.CallFrame, test_id: i32, name: *bun.String) void { + debug("reportTestFound", .{}); + + this.handle.?.reportTestFound(callFrame, test_id, name); + } + + /// Caller must ensure that it is enabled first. + pub fn reportTestStart(this: TestReporterAgent, test_id: i32) void { + debug("reportTestStart", .{}); + this.handle.?.reportTestStart(test_id); + } + + /// Caller must ensure that it is enabled first. 
+ pub fn reportTestEnd(this: TestReporterAgent, test_id: i32, bunTestStatus: TestStatus, elapsed: f64) void { + debug("reportTestEnd", .{}); + this.handle.?.reportTestEnd(test_id, bunTestStatus, elapsed); + } + + pub fn isEnabled(this: TestReporterAgent) bool { + return this.handle != null; + } +}; + +pub const LifecycleAgent = struct { + handle: ?*Handle = null, + const debug = Output.scoped(.LifecycleAgent, false); + + pub const Handle = opaque { + extern "c" fn Bun__LifecycleAgentReportReload(agent: *Handle) void; + extern "c" fn Bun__LifecycleAgentReportError(agent: *Handle, exception: *ZigException) void; + extern "c" fn Bun__LifecycleAgentPreventExit(agent: *Handle) void; + extern "c" fn Bun__LifecycleAgentStopPreventingExit(agent: *Handle) void; + + pub fn preventExit(this: *Handle) void { + Bun__LifecycleAgentPreventExit(this); + } + + pub fn stopPreventingExit(this: *Handle) void { + Bun__LifecycleAgentStopPreventingExit(this); + } + + pub fn reportReload(this: *Handle) void { + debug("reportReload", .{}); + Bun__LifecycleAgentReportReload(this); + } + + pub fn reportError(this: *Handle, exception: *ZigException) void { + debug("reportError", .{}); + Bun__LifecycleAgentReportError(this, exception); + } + }; + + pub export fn Bun__LifecycleAgentEnable(agent: *Handle) void { + if (VirtualMachine.get().debugger) |*debugger| { + debug("enable", .{}); + debugger.lifecycle_reporter_agent.handle = agent; + } + } + + pub export fn Bun__LifecycleAgentDisable(agent: *Handle) void { + _ = agent; // autofix + if (VirtualMachine.get().debugger) |*debugger| { + debug("disable", .{}); + debugger.lifecycle_reporter_agent.handle = null; + } + } + + pub fn reportReload(this: *LifecycleAgent) void { + if (this.handle) |handle| { + handle.reportReload(); + } + } + + pub fn reportError(this: *LifecycleAgent, exception: *ZigException) void { + if (this.handle) |handle| { + handle.reportError(exception); + } + } + + pub fn isEnabled(this: *const LifecycleAgent) bool { + return this.handle != null; + } +}; + +const std = @import("std"); +const bun = @import("bun"); +const uv = bun.windows.libuv; +const Output = bun.Output; +const Environment = bun.Environment; +const jsc = bun.jsc; +const VirtualMachine = jsc.VirtualMachine; +const ZigException = jsc.ZigException; +const Debugger = jsc.Debugger; +const JSGlobalObject = jsc.JSGlobalObject; diff --git a/src/bun.js/ModuleLoader.zig b/src/bun.js/ModuleLoader.zig new file mode 100644 index 0000000000..1437fb30a9 --- /dev/null +++ b/src/bun.js/ModuleLoader.zig @@ -0,0 +1,3077 @@ +const ModuleLoader = @This(); + +pub const node_fallbacks = @import("../node_fallbacks.zig"); + +transpile_source_code_arena: ?*bun.ArenaAllocator = null, +eval_source: ?*logger.Source = null, + +comptime { + _ = Bun__transpileVirtualModule; + _ = Bun__runVirtualModule; + _ = Bun__transpileFile; + _ = Bun__fetchBuiltinModule; + _ = Bun__getDefaultLoader; +} + +pub var is_allowed_to_use_internal_testing_apis = false; + +/// This must be called after calling transpileSourceCode +pub fn resetArena(this: *ModuleLoader, jsc_vm: *VirtualMachine) void { + bun.assert(&jsc_vm.module_loader == this); + if (this.transpile_source_code_arena) |arena| { + if (jsc_vm.smol) { + _ = arena.reset(.free_all); + } else { + _ = arena.reset(.{ .retain_with_limit = 8 * 1024 * 1024 }); + } + } +} + +pub fn resolveEmbeddedFile(vm: *VirtualMachine, input_path: []const u8, extname: []const u8) ?[]const u8 { + if (input_path.len == 0) return null; + var graph = vm.standalone_module_graph orelse return null; + 
const file = graph.find(input_path) orelse return null; + + if (comptime Environment.isLinux) { + // TODO: use /proc/fd/12346 instead! Avoid the copy! + } + + // atomically write to a tmpfile and then move it to the final destination + var tmpname_buf: bun.PathBuffer = undefined; + const tmpfilename = bun.sliceTo(bun.fs.FileSystem.instance.tmpname(extname, &tmpname_buf, bun.hash(file.name)) catch return null, 0); + + const tmpdir: bun.FD = .fromStdDir(bun.fs.FileSystem.instance.tmpdir() catch return null); + + // First we open the tmpfile, to avoid any other work in the event of failure. + const tmpfile = bun.Tmpfile.create(tmpdir, tmpfilename).unwrap() catch return null; + defer tmpfile.fd.close(); + + switch (bun.api.node.fs.NodeFS.writeFileWithPathBuffer( + &tmpname_buf, // not used + + .{ + .data = .{ + .encoded_slice = ZigString.Slice.fromUTF8NeverFree(file.contents), + }, + .dirfd = tmpdir, + .file = .{ .fd = tmpfile.fd }, + .encoding = .buffer, + }, + )) { + .err => { + return null; + }, + else => {}, + } + return bun.path.joinAbs(bun.fs.FileSystem.instance.fs.tmpdirPath(), .auto, tmpfilename); +} + +pub const AsyncModule = struct { + // This is all the state used by the printer to print the module + parse_result: ParseResult, + promise: JSC.Strong = .empty, + path: Fs.Path, + specifier: string = "", + referrer: string = "", + string_buf: []u8 = &[_]u8{}, + fd: ?StoredFileDescriptorType = null, + package_json: ?*PackageJSON = null, + loader: Api.Loader, + hash: u32 = std.math.maxInt(u32), + globalThis: *JSGlobalObject = undefined, + arena: *bun.ArenaAllocator, + + // This is the specific state for making it async + poll_ref: Async.KeepAlive = .{}, + any_task: JSC.AnyTask = undefined, + + pub const Id = u32; + + const PackageDownloadError = struct { + name: []const u8, + resolution: Install.Resolution, + err: anyerror, + url: []const u8, + }; + + const PackageResolveError = struct { + name: []const u8, + err: anyerror, + url: []const u8, + version: Dependency.Version, + }; + + pub const Queue = struct { + map: Map = .{}, + scheduled: u32 = 0, + concurrent_task_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), + + const DeferredDependencyError = struct { + dependency: Dependency, + root_dependency_id: Install.DependencyID, + err: anyerror, + }; + + pub const Map = std.ArrayListUnmanaged(AsyncModule); + + pub fn enqueue(this: *Queue, globalObject: *JSGlobalObject, opts: anytype) void { + debug("enqueue: {s}", .{opts.specifier}); + var module = AsyncModule.init(opts, globalObject) catch unreachable; + module.poll_ref.ref(this.vm()); + + this.map.append(this.vm().allocator, module) catch unreachable; + this.vm().packageManager().drainDependencyList(); + } + + pub fn onDependencyError(ctx: *anyopaque, dependency: Dependency, root_dependency_id: Install.DependencyID, err: anyerror) void { + var this = bun.cast(*Queue, ctx); + debug("onDependencyError: {s}", .{this.vm().packageManager().lockfile.str(&dependency.name)}); + + var modules: []AsyncModule = this.map.items; + var i: usize = 0; + outer: for (modules) |module_| { + var module = module_; + const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); + for (root_dependency_ids, 0..) 
|dep, dep_i| { + if (dep != root_dependency_id) continue; + module.resolveError( + this.vm(), + module.parse_result.pending_imports.items(.import_record_id)[dep_i], + .{ + .name = this.vm().packageManager().lockfile.str(&dependency.name), + .err = err, + .url = "", + .version = dependency.version, + }, + ) catch unreachable; + continue :outer; + } + + modules[i] = module; + i += 1; + } + this.map.items.len = i; + } + pub fn onWakeHandler(ctx: *anyopaque, _: *PackageManager) void { + debug("onWake", .{}); + var this = bun.cast(*Queue, ctx); + this.vm().enqueueTaskConcurrent(JSC.ConcurrentTask.createFrom(this)); + } + + pub fn onPoll(this: *Queue) void { + debug("onPoll", .{}); + this.runTasks(); + this.pollModules(); + } + + pub fn runTasks(this: *Queue) void { + var pm = this.vm().packageManager(); + + if (Output.enable_ansi_colors_stderr) { + pm.startProgressBarIfNone(); + pm.runTasks( + *Queue, + this, + .{ + .onExtract = {}, + .onResolve = onResolve, + .onPackageManifestError = onPackageManifestError, + .onPackageDownloadError = onPackageDownloadError, + .progress_bar = true, + }, + true, + PackageManager.Options.LogLevel.default, + ) catch unreachable; + } else { + pm.runTasks( + *Queue, + this, + .{ + .onExtract = {}, + .onResolve = onResolve, + .onPackageManifestError = onPackageManifestError, + .onPackageDownloadError = onPackageDownloadError, + }, + true, + PackageManager.Options.LogLevel.default_no_progress, + ) catch unreachable; + } + } + + pub fn onResolve(_: *Queue) void { + debug("onResolve", .{}); + } + + pub fn onPackageManifestError( + this: *Queue, + name: []const u8, + err: anyerror, + url: []const u8, + ) void { + debug("onPackageManifestError: {s}", .{name}); + + var modules: []AsyncModule = this.map.items; + var i: usize = 0; + outer: for (modules) |module_| { + var module = module_; + const tags = module.parse_result.pending_imports.items(.tag); + for (tags, 0..) |tag, tag_i| { + if (tag == .resolve) { + const esms = module.parse_result.pending_imports.items(.esm); + const esm = esms[tag_i]; + const string_bufs = module.parse_result.pending_imports.items(.string_buf); + + if (!strings.eql(esm.name.slice(string_bufs[tag_i]), name)) continue; + + const versions = module.parse_result.pending_imports.items(.dependency); + + module.resolveError( + this.vm(), + module.parse_result.pending_imports.items(.import_record_id)[tag_i], + .{ + .name = name, + .err = err, + .url = url, + .version = versions[tag_i], + }, + ) catch unreachable; + continue :outer; + } + } + + modules[i] = module; + i += 1; + } + this.map.items.len = i; + } + + pub fn onPackageDownloadError( + this: *Queue, + package_id: Install.PackageID, + name: []const u8, + resolution: *const Install.Resolution, + err: anyerror, + url: []const u8, + ) void { + debug("onPackageDownloadError: {s}", .{name}); + + const resolution_ids = this.vm().packageManager().lockfile.buffers.resolutions.items; + var modules: []AsyncModule = this.map.items; + var i: usize = 0; + outer: for (modules) |module_| { + var module = module_; + const record_ids = module.parse_result.pending_imports.items(.import_record_id); + const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); + for (root_dependency_ids, 0..) 
|dependency_id, import_id| { + if (resolution_ids[dependency_id] != package_id) continue; + module.downloadError( + this.vm(), + record_ids[import_id], + .{ + .name = name, + .resolution = resolution.*, + .err = err, + .url = url, + }, + ) catch unreachable; + continue :outer; + } + + modules[i] = module; + i += 1; + } + this.map.items.len = i; + } + + pub fn pollModules(this: *Queue) void { + var pm = this.vm().packageManager(); + if (pm.pending_tasks.load(.monotonic) > 0) return; + + var modules: []AsyncModule = this.map.items; + var i: usize = 0; + + for (modules) |mod| { + var module = mod; + var tags = module.parse_result.pending_imports.items(.tag); + const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); + // var esms = module.parse_result.pending_imports.items(.esm); + // var versions = module.parse_result.pending_imports.items(.dependency); + var done_count: usize = 0; + for (tags, 0..) |tag, tag_i| { + const root_id = root_dependency_ids[tag_i]; + const resolution_ids = pm.lockfile.buffers.resolutions.items; + if (root_id >= resolution_ids.len) continue; + const package_id = resolution_ids[root_id]; + + switch (tag) { + .resolve => { + if (package_id == Install.invalid_package_id) { + continue; + } + + // if we get here, the package has already been resolved. + tags[tag_i] = .download; + }, + .download => { + if (package_id == Install.invalid_package_id) { + unreachable; + } + }, + .done => { + done_count += 1; + continue; + }, + } + + if (package_id == Install.invalid_package_id) { + continue; + } + + const package = pm.lockfile.packages.get(package_id); + bun.assert(package.resolution.tag != .root); + + var name_and_version_hash: ?u64 = null; + var patchfile_hash: ?u64 = null; + switch (pm.determinePreinstallState(package, pm.lockfile, &name_and_version_hash, &patchfile_hash)) { + .done => { + // we are only truly done if all the dependencies are done. + const current_tasks = pm.total_tasks; + // so if enqueuing all the dependencies produces no new tasks, we are done. 
+ pm.enqueueDependencyList(package.dependencies); + if (current_tasks == pm.total_tasks) { + tags[tag_i] = .done; + done_count += 1; + } + }, + .extracting => { + // we are extracting the package + // we need to wait for the next poll + continue; + }, + .extract => {}, + else => {}, + } + } + + if (done_count == tags.len) { + module.done(this.vm()); + } else { + modules[i] = module; + i += 1; + } + } + this.map.items.len = i; + if (i == 0) { + // ensure we always end the progress bar + this.vm().packageManager().endProgressBar(); + } + } + + pub fn vm(this: *Queue) *VirtualMachine { + return @alignCast(@fieldParentPtr("modules", this)); + } + }; + + pub fn init(opts: anytype, globalObject: *JSGlobalObject) !AsyncModule { + // var stmt_blocks = js_ast.Stmt.Data.toOwnedSlice(); + // var expr_blocks = js_ast.Expr.Data.toOwnedSlice(); + const this_promise = JSValue.createInternalPromise(globalObject); + const promise = JSC.Strong.create(this_promise, globalObject); + + var buf = bun.StringBuilder{}; + buf.count(opts.referrer); + buf.count(opts.specifier); + buf.count(opts.path.text); + + try buf.allocate(bun.default_allocator); + opts.promise_ptr.?.* = this_promise.asInternalPromise().?; + const referrer = buf.append(opts.referrer); + const specifier = buf.append(opts.specifier); + const path = Fs.Path.init(buf.append(opts.path.text)); + + return AsyncModule{ + .parse_result = opts.parse_result, + .promise = promise, + .path = path, + .specifier = specifier, + .referrer = referrer, + .fd = opts.fd, + .package_json = opts.package_json, + .loader = opts.loader.toAPI(), + .string_buf = buf.allocatedSlice(), + // .stmt_blocks = stmt_blocks, + // .expr_blocks = expr_blocks, + .globalThis = globalObject, + .arena = opts.arena, + }; + } + + pub fn done(this: *AsyncModule, jsc_vm: *VirtualMachine) void { + var clone = jsc_vm.allocator.create(AsyncModule) catch unreachable; + clone.* = this.*; + jsc_vm.modules.scheduled += 1; + clone.any_task = JSC.AnyTask.New(AsyncModule, onDone).init(clone); + jsc_vm.enqueueTask(JSC.Task.init(&clone.any_task)); + } + + pub fn onDone(this: *AsyncModule) void { + JSC.markBinding(@src()); + var jsc_vm = this.globalThis.bunVM(); + jsc_vm.modules.scheduled -= 1; + if (jsc_vm.modules.scheduled == 0) { + jsc_vm.packageManager().endProgressBar(); + } + var log = logger.Log.init(jsc_vm.allocator); + defer log.deinit(); + var errorable: JSC.ErrorableResolvedSource = undefined; + this.poll_ref.unref(jsc_vm); + outer: { + errorable = JSC.ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| { + VirtualMachine.processFetchLog( + this.globalThis, + bun.String.init(this.specifier), + bun.String.init(this.referrer), + &log, + &errorable, + err, + ); + break :outer; + }); + } + + var spec = bun.String.init(ZigString.init(this.specifier).withEncoding()); + var ref = bun.String.init(ZigString.init(this.referrer).withEncoding()); + Bun__onFulfillAsyncModule( + this.globalThis, + this.promise.get().?, + &errorable, + &spec, + &ref, + ); + this.deinit(); + jsc_vm.allocator.destroy(this); + } + + pub fn fulfill( + globalThis: *JSGlobalObject, + promise: JSValue, + resolved_source: ResolvedSource, + err: ?anyerror, + specifier_: bun.String, + referrer_: bun.String, + log: *logger.Log, + ) void { + JSC.markBinding(@src()); + var specifier = specifier_; + var referrer = referrer_; + defer { + specifier.deref(); + referrer.deref(); + } + + var errorable: JSC.ErrorableResolvedSource = undefined; + if (err) |e| { + VirtualMachine.processFetchLog( + globalThis, + specifier, + 
referrer, + log, + &errorable, + e, + ); + } else { + errorable = JSC.ErrorableResolvedSource.ok(resolved_source); + } + log.deinit(); + + debug("fulfill: {any}", .{specifier}); + + Bun__onFulfillAsyncModule( + globalThis, + promise, + &errorable, + &specifier, + &referrer, + ); + } + + pub fn resolveError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageResolveError) !void { + const globalThis = this.globalThis; + + const msg: []u8 = try switch (result.err) { + error.PackageManifestHTTP400 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 400 while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP401 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 401 while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP402 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 402 while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP403 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 403 while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP404 => std.fmt.allocPrint( + bun.default_allocator, + "Package '{s}' was not found", + .{result.name}, + ), + error.PackageManifestHTTP4xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 4xx while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.PackageManifestHTTP5xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 5xx while resolving package '{s}' at '{s}'", + .{ result.name, result.url }, + ), + error.DistTagNotFound, error.NoMatchingVersion => brk: { + const prefix: []const u8 = if (result.err == error.NoMatchingVersion and result.version.tag == .npm and result.version.value.npm.version.isExact()) + "Version not found" + else if (result.version.tag == .npm and !result.version.value.npm.version.isExact()) + "No matching version found" + else + "No match found"; + + break :brk std.fmt.allocPrint( + bun.default_allocator, + "{s} '{s}' for package '{s}' (but package exists)", + .{ prefix, vm.packageManager().lockfile.str(&result.version.literal), result.name }, + ); + }, + else => |err| std.fmt.allocPrint( + bun.default_allocator, + "{s} resolving package '{s}' at '{s}'", + .{ bun.asByteSlice(@errorName(err)), result.name, result.url }, + ), + }; + + const name: []const u8 = switch (result.err) { + error.NoMatchingVersion => "PackageVersionNotFound", + error.DistTagNotFound => "PackageTagNotFound", + error.PackageManifestHTTP403 => "PackageForbidden", + error.PackageManifestHTTP404 => "PackageNotFound", + else => "PackageResolveError", + }; + + var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis); + if (result.url.len > 0) + error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); + const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?; + error_instance.put(globalThis, ZigString.static("sourceURL"), 
ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line)); + if (location.line_text) |line_text| { + error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis)); + } + error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column)); + if (this.referrer.len > 0 and !strings.eqlComptime(this.referrer, "undefined")) { + error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.referrer).withEncoding().toJS(globalThis)); + } + + const promise_value = this.promise.swap(); + var promise = promise_value.asInternalPromise().?; + promise_value.ensureStillAlive(); + this.poll_ref.unref(vm); + this.deinit(); + promise.rejectAsHandled(globalThis, error_instance); + } + pub fn downloadError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageDownloadError) !void { + const globalThis = this.globalThis; + + const msg_args = .{ + result.name, + result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), + }; + + const msg: []u8 = try switch (result.err) { + error.TarballHTTP400 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 400 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP401 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 401 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP402 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 402 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP403 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 403 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP404 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 404 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP4xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 4xx downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP5xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 5xx downloading package '{s}@{any}'", + msg_args, + ), + error.TarballFailedToExtract => std.fmt.allocPrint( + bun.default_allocator, + "Failed to extract tarball for package '{s}@{any}'", + msg_args, + ), + else => |err| std.fmt.allocPrint( + bun.default_allocator, + "{s} downloading package '{s}@{any}'", + .{ + bun.asByteSlice(@errorName(err)), + result.name, + result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), + }, + ), + }; + + const name: []const u8 = switch (result.err) { + error.TarballFailedToExtract => "PackageExtractionError", + error.TarballHTTP403 => "TarballForbiddenError", + error.TarballHTTP404 => "TarballNotFoundError", + else => "TarballDownloadError", + }; + + var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis); + if (result.url.len > 0) + error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); + if (this.specifier.len > 0 and !strings.eqlComptime(this.specifier, "undefined")) { + error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); + } + + const location = 
logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?; + error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init( + this.parse_result.ast.import_records.at(import_record_id).path.text, + ).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line)); + if (location.line_text) |line_text| { + error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis)); + } + error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column)); + + const promise_value = this.promise.swap(); + var promise = promise_value.asInternalPromise().?; + promise_value.ensureStillAlive(); + this.poll_ref.unref(vm); + this.deinit(); + promise.rejectAsHandled(globalThis, error_instance); + } + pub fn downloadError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageDownloadError) !void { + const globalThis = this.globalThis; + + const msg_args = .{ + result.name, + result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), + }; + + const msg: []u8 = try switch (result.err) { + error.TarballHTTP400 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 400 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP401 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 401 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP402 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 402 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP403 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 403 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP404 => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 404 downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP4xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 4xx downloading package '{s}@{any}'", + msg_args, + ), + error.TarballHTTP5xx => std.fmt.allocPrint( + bun.default_allocator, + "HTTP 5xx downloading package '{s}@{any}'", + msg_args, + ), + error.TarballFailedToExtract => std.fmt.allocPrint( + bun.default_allocator, + "Failed to extract tarball for package '{s}@{any}'", + msg_args, + ), + else => |err| std.fmt.allocPrint( + bun.default_allocator, + "{s} downloading package '{s}@{any}'", + .{ + bun.asByteSlice(@errorName(err)), + result.name, + result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), + }, + ), + }; + + const name: []const u8 = switch (result.err) { + error.TarballFailedToExtract => "PackageExtractionError", + error.TarballHTTP403 => "TarballForbiddenError", + error.TarballHTTP404 => "TarballNotFoundError", + else => "TarballDownloadError", + }; + + var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis); + if (result.url.len > 0) + error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); + error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); + if (this.specifier.len > 0 and !strings.eqlComptime(this.specifier, "undefined")) { + error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); + } + + const location =
bun.default_allocator.free(this.expr_blocks); + + bun.default_allocator.free(this.string_buf); + } + + extern "c" fn Bun__onFulfillAsyncModule( + globalObject: *JSGlobalObject, + promiseValue: JSValue, + res: *JSC.ErrorableResolvedSource, + specifier: *bun.String, + referrer: *bun.String, + ) void; +}; + +pub export fn Bun__getDefaultLoader(global: *JSGlobalObject, str: *const bun.String) Api.Loader { + var jsc_vm = global.bunVM(); + const filename = str.toUTF8(jsc_vm.allocator); + defer filename.deinit(); + const loader = jsc_vm.transpiler.options.loader(Fs.PathName.init(filename.slice()).ext).toAPI(); + if (loader == .file) { + return Api.Loader.js; + } + + return loader; +} + +pub fn transpileSourceCode( + jsc_vm: *VirtualMachine, + specifier: string, + referrer: string, + input_specifier: String, + path: Fs.Path, + loader: options.Loader, + module_type: options.ModuleType, + log: *logger.Log, + virtual_source: ?*const logger.Source, + promise_ptr: ?*?*JSC.JSInternalPromise, + source_code_printer: *js_printer.BufferPrinter, + globalObject: ?*JSGlobalObject, + comptime flags: FetchFlags, +) !ResolvedSource { + const disable_transpilying = comptime flags.disableTranspiling(); + + if (comptime disable_transpilying) { + if (!(loader.isJavaScriptLike() or loader == .toml or loader == .text or loader == .json or loader == .jsonc)) { + // Don't print "export default " + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.empty, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + }; + } + } + + switch (loader) { + .js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .text => { + jsc_vm.transpiled_count += 1; + jsc_vm.transpiler.resetStore(); + const hash = bun.Watcher.getHash(path.text); + const is_main = jsc_vm.main.len == path.text.len and + jsc_vm.main_hash == hash and + strings.eqlLong(jsc_vm.main, path.text, false); + + var arena_: ?*bun.ArenaAllocator = brk: { + // Attempt to reuse the Arena from the parser when we can + // This code is potentially re-entrant, so only one Arena can be reused at a time + // That's why we have to check if the Arena is null + // + // Using an Arena here is a significant memory optimization when loading many files + if (jsc_vm.module_loader.transpile_source_code_arena) |shared| { + jsc_vm.module_loader.transpile_source_code_arena = null; + break :brk shared; + } + + // we must allocate the arena so that the pointer it points to is always valid. 
+ const arena = try jsc_vm.allocator.create(bun.ArenaAllocator); + arena.* = bun.ArenaAllocator.init(bun.default_allocator); + break :brk arena; + }; + + var give_back_arena = true; + defer { + if (give_back_arena) { + if (jsc_vm.module_loader.transpile_source_code_arena == null) { + // when .print_source is used + // caller is responsible for freeing the arena + if (flags != .print_source) { + if (jsc_vm.smol) { + _ = arena_.?.reset(.free_all); + } else { + _ = arena_.?.reset(.{ .retain_with_limit = 8 * 1024 * 1024 }); + } + } + + jsc_vm.module_loader.transpile_source_code_arena = arena_; + } else { + arena_.?.deinit(); + jsc_vm.allocator.destroy(arena_.?); + } + } + } + + var arena = arena_.?; + const allocator = arena.allocator(); + + var fd: ?StoredFileDescriptorType = null; + var package_json: ?*PackageJSON = null; + + if (jsc_vm.bun_watcher.indexOf(hash)) |index| { + fd = jsc_vm.bun_watcher.watchlist().items(.fd)[index].unwrapValid(); + package_json = jsc_vm.bun_watcher.watchlist().items(.package_json)[index]; + } + + var cache = JSC.RuntimeTranspilerCache{ + .output_code_allocator = allocator, + .sourcemap_allocator = bun.default_allocator, + }; + + const old = jsc_vm.transpiler.log; + jsc_vm.transpiler.log = log; + jsc_vm.transpiler.linker.log = log; + jsc_vm.transpiler.resolver.log = log; + if (jsc_vm.transpiler.resolver.package_manager) |pm| { + pm.log = log; + } + + defer { + jsc_vm.transpiler.log = old; + jsc_vm.transpiler.linker.log = old; + jsc_vm.transpiler.resolver.log = old; + if (jsc_vm.transpiler.resolver.package_manager) |pm| { + pm.log = old; + } + } + + // this should be a cheap lookup because 24 bytes == 8 * 3 so it's read 3 machine words + const is_node_override = strings.hasPrefixComptime(specifier, node_fallbacks.import_path); + + const macro_remappings = if (jsc_vm.macro_mode or !jsc_vm.has_any_macro_remappings or is_node_override) + MacroRemap{} + else + jsc_vm.transpiler.options.macro_remap; + + var fallback_source: logger.Source = undefined; + + // Usually, we want to close the input file automatically. 
+ // + // If we're re-using the file descriptor from the fs watcher + // Do not close it because that will break the kqueue-based watcher + // + var should_close_input_file_fd = fd == null; + + // We don't want cjs wrappers around non-js files + const module_type_only_for_wrappables = switch (loader) { + .js, .jsx, .ts, .tsx => module_type, + else => .unknown, + }; + + var input_file_fd: StoredFileDescriptorType = bun.invalid_fd; + var parse_options = Transpiler.ParseOptions{ + .allocator = allocator, + .path = path, + .loader = loader, + .dirname_fd = bun.invalid_fd, + .file_descriptor = fd, + .file_fd_ptr = &input_file_fd, + .file_hash = hash, + .macro_remappings = macro_remappings, + .jsx = jsc_vm.transpiler.options.jsx, + .emit_decorator_metadata = jsc_vm.transpiler.options.emit_decorator_metadata, + .virtual_source = virtual_source, + .dont_bundle_twice = true, + .allow_commonjs = true, + .module_type = module_type_only_for_wrappables, + .inject_jest_globals = jsc_vm.transpiler.options.rewrite_jest_for_tests, + .keep_json_and_toml_as_one_statement = true, + .allow_bytecode_cache = true, + .set_breakpoint_on_first_line = is_main and + jsc_vm.debugger != null and + jsc_vm.debugger.?.set_breakpoint_on_first_line and + setBreakPointOnFirstLine(), + .runtime_transpiler_cache = if (!disable_transpilying and !JSC.RuntimeTranspilerCache.is_disabled) &cache else null, + .remove_cjs_module_wrapper = is_main and jsc_vm.module_loader.eval_source != null, + }; + defer { + if (should_close_input_file_fd and input_file_fd != bun.invalid_fd) { + input_file_fd.close(); + input_file_fd = bun.invalid_fd; + } + } + + if (is_node_override) { + if (node_fallbacks.contentsFromPath(specifier)) |code| { + const fallback_path = Fs.Path.initWithNamespace(specifier, "node"); + fallback_source = logger.Source{ .path = fallback_path, .contents = code }; + parse_options.virtual_source = &fallback_source; + } + } + + var parse_result: ParseResult = switch (disable_transpilying or + (loader == .json)) { + inline else => |return_file_only| brk: { + break :brk jsc_vm.transpiler.parseMaybeReturnFileOnly( + parse_options, + null, + return_file_only, + ) orelse { + if (comptime !disable_transpilying) { + if (jsc_vm.isWatcherEnabled()) { + if (input_file_fd.isValid()) { + if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { + should_close_input_file_fd = false; + _ = jsc_vm.bun_watcher.addFile( + input_file_fd, + path.text, + hash, + loader, + .invalid, + package_json, + true, + ); + } + } + } + } + + give_back_arena = false; + return error.ParseError; + }; + }, + }; + + if (parse_result.loader == .wasm) { + return transpileSourceCode( + jsc_vm, + specifier, + referrer, + input_specifier, + path, + .wasm, + .unknown, // cjs/esm don't make sense for wasm + log, + &parse_result.source, + promise_ptr, + source_code_printer, + globalObject, + flags, + ); + } + + if (comptime !disable_transpilying) { + if (jsc_vm.isWatcherEnabled()) { + if (input_file_fd.isValid()) { + if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { + should_close_input_file_fd = false; + _ = jsc_vm.bun_watcher.addFile( + input_file_fd, + path.text, + hash, + loader, + .invalid, + package_json, + true, + ); + } + } + } + } + + if (jsc_vm.transpiler.log.errors > 0) { + give_back_arena = false; + return error.ParseError; + } + + if (loader == .json) { + return ResolvedSource{ + .allocator = null, + .source_code = 
bun.String.createUTF8(parse_result.source.contents), + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .tag = ResolvedSource.Tag.json_for_object_loader, + }; + } + + if (comptime disable_transpilying) { + return ResolvedSource{ + .allocator = null, + .source_code = switch (comptime flags) { + .print_source_and_clone => bun.String.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable), + .print_source => bun.String.init(parse_result.source.contents), + else => @compileError("unreachable"), + }, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + }; + } + + if (loader == .json or loader == .jsonc or loader == .toml) { + if (parse_result.empty) { + return ResolvedSource{ + .allocator = null, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .jsvalue_for_export = JSValue.createEmptyObject(jsc_vm.global, 0), + .tag = .exports_object, + }; + } + + return ResolvedSource{ + .allocator = null, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .jsvalue_for_export = parse_result.ast.parts.@"[0]"().stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}), + .tag = .exports_object, + }; + } + + if (parse_result.already_bundled != .none) { + const bytecode_slice = parse_result.already_bundled.bytecodeSlice(); + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.createLatin1(parse_result.source.contents), + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .already_bundled = true, + .bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null, + .bytecode_cache_size = bytecode_slice.len, + .is_commonjs_module = parse_result.already_bundled.isCommonJS(), + }; + } + + if (parse_result.empty) { + const was_cjs = (loader == .js or loader == .ts) and brk: { + const ext = std.fs.path.extension(parse_result.source.path.text); + break :brk strings.eqlComptime(ext, ".cjs") or strings.eqlComptime(ext, ".cts"); + }; + if (was_cjs) { + return .{ + .allocator = null, + .source_code = bun.String.static("(function(){})"), + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .is_commonjs_module = true, + .tag = .javascript, + }; + } + } + + if (cache.entry) |*entry| { + jsc_vm.source_mappings.putMappings(parse_result.source, .{ + .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len }, + .allocator = bun.default_allocator, + }) catch {}; + + if (comptime Environment.allow_assert) { + dumpSourceString(jsc_vm, specifier, entry.output_code.byteSlice()); + } + + return ResolvedSource{ + .allocator = null, + .source_code = switch (entry.output_code) { + .string => entry.output_code.string, + .utf8 => brk: { + const result = bun.String.createUTF8(entry.output_code.utf8); + cache.output_code_allocator.free(entry.output_code.utf8); + entry.output_code.utf8 = ""; + break :brk result; + }, + }, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .is_commonjs_module = entry.metadata.module_type == .cjs, + .tag = brk: { + if (entry.metadata.module_type == .cjs and parse_result.source.path.isFile()) { + const actual_package_json: *PackageJSON = package_json orelse brk2: { + // this should already be cached virtually always so it's fine to do this + const 
dir_info = (jsc_vm.transpiler.resolver.readDirInfo(parse_result.source.path.name.dir) catch null) orelse + break :brk .javascript; + + break :brk2 dir_info.package_json orelse dir_info.enclosing_package_json; + } orelse break :brk .javascript; + + if (actual_package_json.module_type == .esm) { + break :brk ResolvedSource.Tag.package_json_type_module; + } + } + + break :brk ResolvedSource.Tag.javascript; + }, + }; + } + + const start_count = jsc_vm.transpiler.linker.import_counter; + + // We _must_ link because: + // - node_modules bundle won't be properly + try jsc_vm.transpiler.linker.link( + path, + &parse_result, + jsc_vm.origin, + .absolute_path, + false, + true, + ); + + if (parse_result.pending_imports.len > 0) { + if (promise_ptr == null) { + return error.UnexpectedPendingResolution; + } + + if (parse_result.source.contents_is_recycled) { + // this shared buffer is about to become owned by the AsyncModule struct + jsc_vm.transpiler.resolver.caches.fs.resetSharedBuffer( + jsc_vm.transpiler.resolver.caches.fs.sharedBuffer(), + ); + } + + jsc_vm.modules.enqueue( + globalObject.?, + .{ + .parse_result = parse_result, + .path = path, + .loader = loader, + .fd = fd, + .package_json = package_json, + .hash = hash, + .promise_ptr = promise_ptr, + .specifier = specifier, + .referrer = referrer, + .arena = arena, + }, + ); + give_back_arena = false; + return error.AsyncModule; + } + + if (!jsc_vm.macro_mode) + jsc_vm.resolved_count += jsc_vm.transpiler.linker.import_counter - start_count; + jsc_vm.transpiler.linker.import_counter = 0; + + var printer = source_code_printer.*; + printer.ctx.reset(); + defer source_code_printer.* = printer; + _ = brk: { + var mapper = jsc_vm.sourceMapHandler(&printer); + + break :brk try jsc_vm.transpiler.printWithSourceMap( + parse_result, + @TypeOf(&printer), + &printer, + .esm_ascii, + mapper.get(), + ); + }; + + if (comptime Environment.dump_source) { + dumpSource(jsc_vm, specifier, &printer); + } + + defer { + if (is_main) { + jsc_vm.has_loaded = true; + } + } + + if (jsc_vm.isWatcherEnabled()) { + var resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, input_specifier, path.text, null, false); + resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs; + return resolved_source; + } + + // Pass along package.json type "module" if set. 
+ const tag: ResolvedSource.Tag = switch (loader) { + .json, .jsonc => .json_for_object_loader, + .js, .jsx, .ts, .tsx => brk: { + const module_type_ = if (package_json) |pkg| pkg.module_type else module_type; + + break :brk switch (module_type_) { + .esm => .package_json_type_module, + .cjs => .package_json_type_commonjs, + else => .javascript, + }; + }, + else => .javascript, + }; + + return .{ + .allocator = null, + .source_code = brk: { + const written = printer.ctx.getWritten(); + const result = cache.output_code orelse bun.String.createLatin1(written); + + if (written.len > 1024 * 1024 * 2 or jsc_vm.smol) { + printer.ctx.buffer.deinit(); + } + + break :brk result; + }, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, + .tag = tag, + }; + }, + // provideFetch() should be called + .napi => unreachable, + // .wasm => { + // jsc_vm.transpiled_count += 1; + // var fd: ?StoredFileDescriptorType = null; + + // var allocator = if (jsc_vm.has_loaded) jsc_vm.arena.allocator() else jsc_vm.allocator; + + // const hash = http.Watcher.getHash(path.text); + // if (jsc_vm.watcher) |watcher| { + // if (watcher.indexOf(hash)) |index| { + // const _fd = watcher.watchlist().items(.fd)[index]; + // fd = if (_fd > 0) _fd else null; + // } + // } + + // var parse_options = Transpiler.ParseOptions{ + // .allocator = allocator, + // .path = path, + // .loader = loader, + // .dirname_fd = 0, + // .file_descriptor = fd, + // .file_hash = hash, + // .macro_remappings = MacroRemap{}, + // .jsx = jsc_vm.transpiler.options.jsx, + // }; + + // var parse_result = jsc_vm.transpiler.parse( + // parse_options, + // null, + // ) orelse { + // return error.ParseError; + // }; + + // return ResolvedSource{ + // .allocator = if (jsc_vm.has_loaded) &jsc_vm.allocator else null, + // .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable), + // .specifier = ZigString.init(specifier), + // .source_url = input_specifier.createIfDifferent(path.text), + // .tag = ResolvedSource.Tag.wasm, + // }; + // }, + .wasm => { + if (strings.eqlComptime(referrer, "undefined") and strings.eqlLong(jsc_vm.main, path.text, true)) { + if (virtual_source) |source| { + if (globalObject) |globalThis| { + // attempt to avoid reading the WASM file twice. 
+ const encoded = JSC.EncodedJSValue{ + .asPtr = globalThis, + }; + const globalValue = @as(JSValue, @enumFromInt(encoded.asInt64)); + globalValue.put( + globalThis, + ZigString.static("wasmSourceBytes"), + JSC.ArrayBuffer.create(globalThis, source.contents, .Uint8Array), + ); + } + } + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.static(@embedFile("../js/wasi-runner.js")), + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .tag = .esm, + }; + } + + return transpileSourceCode( + jsc_vm, + specifier, + referrer, + input_specifier, + path, + .file, + .unknown, // cjs/esm don't make sense for wasm + log, + virtual_source, + promise_ptr, + source_code_printer, + globalObject, + flags, + ); + }, + + .sqlite_embedded, .sqlite => { + const sqlite_module_source_code_string = brk: { + if (jsc_vm.hot_reload == .hot) { + break :brk + \\// Generated code + \\import {Database} from 'bun:sqlite'; + \\const {path} = import.meta; + \\ + \\// Don't reload the database if it's already loaded + \\const registry = (globalThis[Symbol.for("bun:sqlite:hot")] ??= new Map()); + \\ + \\export let db = registry.get(path); + \\export const __esModule = true; + \\if (!db) { + \\ // Load the database + \\ db = new Database(path); + \\ registry.set(path, db); + \\} + \\ + \\export default db; + ; + } + + break :brk + \\// Generated code + \\import {Database} from 'bun:sqlite'; + \\export const db = new Database(import.meta.path); + \\ + \\export const __esModule = true; + \\export default db; + ; + }; + + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.createUTF8(sqlite_module_source_code_string), + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .tag = .esm, + }; + }, + + .html => { + if (flags.disableTranspiling()) { + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.empty, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .tag = .esm, + }; + } + + if (globalObject == null) { + return error.NotSupported; + } + + const html_bundle = try JSC.API.HTMLBundle.init(globalObject.?, path.text); + return ResolvedSource{ + .allocator = &jsc_vm.allocator, + .jsvalue_for_export = html_bundle.toJS(globalObject.?), + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .tag = .export_default_object, + }; + }, + + else => { + if (flags.disableTranspiling()) { + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.empty, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .tag = .esm, + }; + } + + if (virtual_source == null) { + if (jsc_vm.isWatcherEnabled()) auto_watch: { + if (std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { + const input_fd: bun.StoredFileDescriptorType = brk: { + // on macOS, we need a file descriptor to receive event notifications on it. + // so we use O_EVTONLY to open the file descriptor without asking any additional permissions. + if (bun.Watcher.requires_file_descriptors) { + switch (bun.sys.open( + &(std.posix.toPosixPath(path.text) catch break :auto_watch), + bun.c.O_EVTONLY, + 0, + )) { + .err => break :auto_watch, + .result => |fd| break :brk fd, + } + } else { + // Otherwise, don't even bother opening it. 
+ break :brk .invalid; + } + }; + const hash = bun.Watcher.getHash(path.text); + switch (jsc_vm.bun_watcher.addFile( + input_fd, + path.text, + hash, + loader, + .invalid, + null, + true, + )) { + .err => { + if (comptime Environment.isMac) { + // If any error occurs and we just + // opened the file descriptor to + // receive event notifications on + // it, we should close it. + if (input_fd.isValid()) { + input_fd.close(); + } + } + + // we don't consider it a failure if we cannot watch the file + // they didn't open the file + }, + .result => {}, + } + } + } + } + + const value = brk: { + if (!jsc_vm.origin.isEmpty()) { + var buf = MutableString.init2048(jsc_vm.allocator) catch bun.outOfMemory(); + defer buf.deinit(); + var writer = buf.writer(); + JSC.API.Bun.getPublicPath(specifier, jsc_vm.origin, @TypeOf(&writer), &writer); + break :brk bun.String.createUTF8ForJS(globalObject.?, buf.slice()); + } + + break :brk bun.String.createUTF8ForJS(globalObject.?, path.text); + }; + + return ResolvedSource{ + .allocator = null, + .jsvalue_for_export = value, + .specifier = input_specifier, + .source_url = input_specifier.createIfDifferent(path.text), + .tag = .export_default_object, + }; + }, + } +} + +pub export fn Bun__resolveAndFetchBuiltinModule( + jsc_vm: *VirtualMachine, + specifier: *bun.String, + ret: *JSC.ErrorableResolvedSource, +) bool { + JSC.markBinding(@src()); + var log = logger.Log.init(jsc_vm.transpiler.allocator); + defer log.deinit(); + + const alias = HardcodedModule.Alias.bun_aliases.getWithEql(specifier.*, bun.String.eqlComptime) orelse + return false; + const hardcoded = HardcodedModule.map.get(alias.path) orelse { + bun.debugAssert(false); + return false; + }; + ret.* = .ok( + getHardcodedModule(jsc_vm, specifier.*, hardcoded) orelse + return false, + ); + return true; +} + +pub export fn Bun__fetchBuiltinModule( + jsc_vm: *VirtualMachine, + globalObject: *JSGlobalObject, + specifier: *bun.String, + referrer: *bun.String, + ret: *JSC.ErrorableResolvedSource, +) bool { + JSC.markBinding(@src()); + var log = logger.Log.init(jsc_vm.transpiler.allocator); + defer log.deinit(); + + if (ModuleLoader.fetchBuiltinModule( + jsc_vm, + specifier.*, + ) catch |err| { + if (err == error.AsyncModule) { + unreachable; + } + + VirtualMachine.processFetchLog(globalObject, specifier.*, referrer.*, &log, ret, err); + return true; + }) |builtin| { + ret.* = JSC.ErrorableResolvedSource.ok(builtin); + return true; + } else { + return false; + } +} + +const always_sync_modules = .{"reflect-metadata"}; + +pub export fn Bun__transpileFile( + jsc_vm: *VirtualMachine, + globalObject: *JSGlobalObject, + specifier_ptr: *bun.String, + referrer: *bun.String, + type_attribute: ?*const bun.String, + ret: *JSC.ErrorableResolvedSource, + allow_promise: bool, + is_commonjs_require: bool, + force_loader_type: bun.options.Loader.Optional, +) ?*anyopaque { + JSC.markBinding(@src()); + var log = logger.Log.init(jsc_vm.transpiler.allocator); + defer log.deinit(); + + var _specifier = specifier_ptr.toUTF8(jsc_vm.allocator); + var referrer_slice = referrer.toUTF8(jsc_vm.allocator); + defer _specifier.deinit(); + defer referrer_slice.deinit(); + + var type_attribute_str: ?string = null; + if (type_attribute) |attribute| if (attribute.asUTF8()) |attr_utf8| { + type_attribute_str = attr_utf8; + }; + + var virtual_source_to_use: ?logger.Source = null; + var blob_to_deinit: ?JSC.WebCore.Blob = null; + var lr = options.getLoaderAndVirtualSource(_specifier.slice(), jsc_vm, &virtual_source_to_use, &blob_to_deinit, 
type_attribute_str) catch { + ret.* = JSC.ErrorableResolvedSource.err(error.JSErrorObject, globalObject.ERR(.MODULE_NOT_FOUND, "Blob not found", .{}).toJS().asVoid()); + return null; + }; + defer if (blob_to_deinit) |*blob| blob.deinit(); + + if (force_loader_type.unwrap()) |loader_type| { + @branchHint(.unlikely); + bun.assert(!is_commonjs_require); + lr.loader = loader_type; + } else if (is_commonjs_require and jsc_vm.has_mutated_built_in_extensions > 0) { + @branchHint(.unlikely); + if (node_module_module.findLongestRegisteredExtension(jsc_vm, _specifier.slice())) |entry| { + switch (entry) { + .loader => |loader| { + lr.loader = loader; + }, + .custom => |index| { + ret.* = JSC.ErrorableResolvedSource.ok(ResolvedSource{ + .allocator = null, + .source_code = bun.String.empty, + .specifier = .empty, + .source_url = .empty, + .cjs_custom_extension_index = index, + .tag = .common_js_custom_extension, + }); + return null; + }, + } + } + } + + const module_type: options.ModuleType = brk: { + const ext = lr.path.name.ext; + // regular expression /.[cm][jt]s$/ + if (ext.len == ".cjs".len) { + if (strings.eqlComptimeIgnoreLen(ext, ".cjs")) + break :brk .cjs; + if (strings.eqlComptimeIgnoreLen(ext, ".mjs")) + break :brk .esm; + if (strings.eqlComptimeIgnoreLen(ext, ".cts")) + break :brk .cjs; + if (strings.eqlComptimeIgnoreLen(ext, ".mts")) + break :brk .esm; + } + // regular expression /.[jt]s$/ + if (ext.len == ".ts".len) { + if (strings.eqlComptimeIgnoreLen(ext, ".js") or + strings.eqlComptimeIgnoreLen(ext, ".ts")) + { + // Use the package.json module type if it exists + break :brk if (lr.package_json) |pkg| + pkg.module_type + else + .unknown; + } + } + // For JSX TSX and other extensions, let the file contents. + break :brk .unknown; + }; + const pkg_name: ?[]const u8 = if (lr.package_json) |pkg| + if (pkg.name.len > 0) pkg.name else null + else + null; + + // We only run the transpiler concurrently when we can. + // Today, that's: + // + // Import Statements (import 'foo') + // Import Expressions (import('foo')) + // + transpile_async: { + if (comptime bun.FeatureFlags.concurrent_transpiler) { + const concurrent_loader = lr.loader orelse .file; + if (blob_to_deinit == null and + allow_promise and + (jsc_vm.has_loaded or jsc_vm.is_in_preload) and + concurrent_loader.isJavaScriptLike() and + !lr.is_main and + // Plugins make this complicated, + // TODO: allow running concurrently when no onLoad handlers match a plugin. + jsc_vm.plugin_runner == null and jsc_vm.transpiler_store.enabled) + { + // This absolutely disgusting hack is a workaround in cases + // where an async import is made to a CJS file with side + // effects that other modules depend on, without incurring + // the cost of transpiling/loading CJS modules synchronously. + // + // The cause of this comes from the fact that we immediately + // and synchronously evaluate CJS modules after they've been + // transpiled, but transpiling (which, for async imports, + // happens in a thread pool), can resolve in whatever order. + // This messes up module execution order. + // + // This is only _really_ important for + // import("some-polyfill") cases, the most impactful of + // which is `reflect-metadata`. People could also use + // require or just preload their polyfills, but they aren't + // doing this. This hack makes important polyfills work without + // incurring the cost of transpiling/loading CJS modules + // synchronously. The proper fix is to evaluate CJS modules + // at the same time as ES modules. 
This is blocked by the + // fact that we need exports from CJS modules and our parser + // doesn't record them. + if (pkg_name) |pkg_name_| { + inline for (always_sync_modules) |always_sync_specifier| { + if (bun.strings.eqlComptime(pkg_name_, always_sync_specifier)) { + break :transpile_async; + } + } + } + + // TODO: check if the resolved source must be transpiled synchronously + return jsc_vm.transpiler_store.transpile( + jsc_vm, + globalObject, + specifier_ptr.dupeRef(), + lr.path, + referrer.dupeRef(), + concurrent_loader, + lr.package_json, + ); + } + } + } + + const synchronous_loader: options.Loader = lr.loader orelse loader: { + if (jsc_vm.has_loaded or jsc_vm.is_in_preload) { + // Extensionless files in this context are treated as the JS loader + if (lr.path.name.ext.len == 0) { + break :loader .tsx; + } + + // Unknown extensions are to be treated as file loader + if (is_commonjs_require) { + if (jsc_vm.commonjs_custom_extensions.entries.len > 0 and + jsc_vm.has_mutated_built_in_extensions == 0) + { + @branchHint(.unlikely); + if (node_module_module.findLongestRegisteredExtension(jsc_vm, lr.path.text)) |entry| { + switch (entry) { + .loader => |loader| break :loader loader, + .custom => |index| { + ret.* = JSC.ErrorableResolvedSource.ok(ResolvedSource{ + .allocator = null, + .source_code = bun.String.empty, + .specifier = .empty, + .source_url = .empty, + .cjs_custom_extension_index = index, + .tag = .common_js_custom_extension, + }); + return null; + }, + } + } + } + + // For Node.js compatibility, requiring a file with an + // unknown extension will be treated as a JS file + break :loader .ts; + } + + // For ESM, Bun treats unknown extensions as file loader + break :loader .file; + } else { + // Unless it's potentially the main module + // This is important so that "bun run ./foo-i-have-no-extension" works + break :loader .tsx; + } + }; + + if (comptime Environment.allow_assert) + debug("transpile({s}, {s}, sync)", .{ lr.specifier, @tagName(synchronous_loader) }); + + defer jsc_vm.module_loader.resetArena(jsc_vm); + + var promise: ?*JSC.JSInternalPromise = null; + ret.* = JSC.ErrorableResolvedSource.ok( + ModuleLoader.transpileSourceCode( + jsc_vm, + lr.specifier, + referrer_slice.slice(), + specifier_ptr.*, + lr.path, + synchronous_loader, + module_type, + &log, + lr.virtual_source, + if (allow_promise) &promise else null, + VirtualMachine.source_code_printer.?, + globalObject, + FetchFlags.transpile, + ) catch |err| { + if (err == error.AsyncModule) { + bun.assert(promise != null); + return promise; + } + + if (err == error.PluginError) { + return null; + } + + VirtualMachine.processFetchLog(globalObject, specifier_ptr.*, referrer.*, &log, ret, err); + return null; + }, + ); + return promise; +} + +export fn Bun__runVirtualModule(globalObject: *JSGlobalObject, specifier_ptr: *const bun.String) JSValue { + JSC.markBinding(@src()); + if (globalObject.bunVM().plugin_runner == null) return JSValue.zero; + + const specifier_slice = specifier_ptr.toUTF8(bun.default_allocator); + defer specifier_slice.deinit(); + const specifier = specifier_slice.slice(); + + if (!PluginRunner.couldBePlugin(specifier)) { + return JSValue.zero; + } + + const namespace = PluginRunner.extractNamespace(specifier); + const after_namespace = if (namespace.len == 0) + specifier + else + specifier[@min(namespace.len + 1, specifier.len)..]; + + return globalObject.runOnLoadPlugins(bun.String.init(namespace), bun.String.init(after_namespace), .bun) orelse + return JSValue.zero; +} + +fn 
getHardcodedModule(jsc_vm: *VirtualMachine, specifier: bun.String, hardcoded: HardcodedModule) ?ResolvedSource { + Analytics.Features.builtin_modules.insert(hardcoded); + return switch (hardcoded) { + .@"bun:main" => .{ + .allocator = null, + .source_code = bun.String.createUTF8(jsc_vm.entry_point.source.contents), + .specifier = specifier, + .source_url = specifier, + .tag = .esm, + .source_code_needs_deref = true, + }, + .@"bun:internal-for-testing" => { + if (!Environment.isDebug) { + if (!is_allowed_to_use_internal_testing_apis) + return null; + } + return jsSyntheticModule(.@"bun:internal-for-testing", specifier); + }, + .@"bun:wrap" => .{ + .allocator = null, + .source_code = String.init(Runtime.Runtime.sourceCode()), + .specifier = specifier, + .source_url = specifier, + }, + inline else => |tag| jsSyntheticModule(@field(ResolvedSource.Tag, @tagName(tag)), specifier), + }; +} + +pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: bun.String) !?ResolvedSource { + if (HardcodedModule.map.getWithEql(specifier, bun.String.eqlComptime)) |hardcoded| { + return getHardcodedModule(jsc_vm, specifier, hardcoded); + } + + if (specifier.hasPrefixComptime(js_ast.Macro.namespaceWithColon)) { + const spec = specifier.toUTF8(bun.default_allocator); + defer spec.deinit(); + if (jsc_vm.macro_entry_points.get(MacroEntryPoint.generateIDFromSpecifier(spec.slice()))) |entry| { + return .{ + .allocator = null, + .source_code = bun.String.createUTF8(entry.source.contents), + .specifier = specifier, + .source_url = specifier.dupeRef(), + }; + } + } else if (jsc_vm.standalone_module_graph) |graph| { + const specifier_utf8 = specifier.toUTF8(bun.default_allocator); + defer specifier_utf8.deinit(); + if (graph.files.getPtr(specifier_utf8.slice())) |file| { + if (file.loader == .sqlite or file.loader == .sqlite_embedded) { + const code = + \\/* Generated code */ + \\import {Database} from 'bun:sqlite'; + \\import {readFileSync} from 'node:fs'; + \\export const db = new Database(readFileSync(import.meta.path)); + \\ + \\export const __esModule = true; + \\export default db; + ; + return .{ + .allocator = null, + .source_code = bun.String.static(code), + .specifier = specifier, + .source_url = specifier.dupeRef(), + .source_code_needs_deref = false, + }; + } + + return .{ + .allocator = null, + .source_code = file.toWTFString(), + .specifier = specifier, + .source_url = specifier.dupeRef(), + .source_code_needs_deref = false, + .bytecode_cache = if (file.bytecode.len > 0) file.bytecode.ptr else null, + .bytecode_cache_size = file.bytecode.len, + .is_commonjs_module = file.module_format == .cjs, + }; + } + } + + return null; +} + +export fn Bun__transpileVirtualModule( + globalObject: *JSGlobalObject, + specifier_ptr: *const bun.String, + referrer_ptr: *const bun.String, + source_code: *ZigString, + loader_: Api.Loader, + ret: *JSC.ErrorableResolvedSource, +) bool { + JSC.markBinding(@src()); + const jsc_vm = globalObject.bunVM(); + bun.assert(jsc_vm.plugin_runner != null); + + var specifier_slice = specifier_ptr.toUTF8(jsc_vm.allocator); + const specifier = specifier_slice.slice(); + defer specifier_slice.deinit(); + var source_code_slice = source_code.toSlice(jsc_vm.allocator); + defer source_code_slice.deinit(); + var referrer_slice = referrer_ptr.toUTF8(jsc_vm.allocator); + defer referrer_slice.deinit(); + + var virtual_source = logger.Source.initPathString(specifier, source_code_slice.slice()); + var log = logger.Log.init(jsc_vm.allocator); + const path = Fs.Path.init(specifier); + + const loader = 
if (loader_ != ._none) + options.Loader.fromAPI(loader_) + else + jsc_vm.transpiler.options.loaders.get(path.name.ext) orelse brk: { + if (strings.eqlLong(specifier, jsc_vm.main, true)) { + break :brk options.Loader.js; + } + + break :brk options.Loader.file; + }; + + defer log.deinit(); + defer jsc_vm.module_loader.resetArena(jsc_vm); + + ret.* = JSC.ErrorableResolvedSource.ok( + ModuleLoader.transpileSourceCode( + jsc_vm, + specifier_slice.slice(), + referrer_slice.slice(), + specifier_ptr.*, + path, + loader, + .unknown, + &log, + &virtual_source, + null, + VirtualMachine.source_code_printer.?, + globalObject, + FetchFlags.transpile, + ) catch |err| { + if (err == error.PluginError) { + return true; + } + VirtualMachine.processFetchLog(globalObject, specifier_ptr.*, referrer_ptr.*, &log, ret, err); + return true; + }, + ); + Analytics.Features.virtual_modules += 1; + return true; +} + +inline fn jsSyntheticModule(name: ResolvedSource.Tag, specifier: String) ResolvedSource { + return ResolvedSource{ + .allocator = null, + .source_code = bun.String.empty, + .specifier = specifier, + .source_url = bun.String.static(@tagName(name)), + .tag = name, + .source_code_needs_deref = false, + }; +} + +/// Dumps the module source to a file in /tmp/bun-debug-src/{filepath} +/// +/// This can technically fail if concurrent access across processes happens, or permission issues. +/// Errors here should always be ignored. +fn dumpSource(vm: *VirtualMachine, specifier: string, printer: anytype) void { + dumpSourceString(vm, specifier, printer.ctx.getWritten()); +} + +fn dumpSourceString(vm: *VirtualMachine, specifier: string, written: []const u8) void { + dumpSourceStringFailiable(vm, specifier, written) catch |e| { + Output.debugWarn("Failed to dump source string: {}", .{e}); + }; +} + +fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: []const u8) !void { + if (!Environment.isDebug) return; + if (bun.getRuntimeFeatureFlag("BUN_DEBUG_NO_DUMP")) return; + + const BunDebugHolder = struct { + pub var dir: ?std.fs.Dir = null; + pub var lock: bun.Mutex = .{}; + }; + + BunDebugHolder.lock.lock(); + defer BunDebugHolder.lock.unlock(); + + const dir = BunDebugHolder.dir orelse dir: { + const base_name = switch (Environment.os) { + else => "/tmp/bun-debug-src/", + .windows => brk: { + const temp = bun.fs.FileSystem.RealFS.platformTempDir(); + var win_temp_buffer: bun.PathBuffer = undefined; + @memcpy(win_temp_buffer[0..temp.len], temp); + const suffix = "\\bun-debug-src"; + @memcpy(win_temp_buffer[temp.len .. temp.len + suffix.len], suffix); + win_temp_buffer[temp.len + suffix.len] = 0; + break :brk win_temp_buffer[0 .. 
temp.len + suffix.len :0]; + }, + }; + const dir = try std.fs.cwd().makeOpenPath(base_name, .{}); + BunDebugHolder.dir = dir; + break :dir dir; + }; + + if (std.fs.path.dirname(specifier)) |dir_path| { + const root_len = switch (Environment.os) { + else => "/".len, + .windows => bun.path.windowsFilesystemRoot(dir_path).len, + }; + var parent = try dir.makeOpenPath(dir_path[root_len..], .{}); + defer parent.close(); + parent.writeFile(.{ + .sub_path = std.fs.path.basename(specifier), + .data = written, + }) catch |e| { + Output.debugWarn("Failed to dump source string: writeFile {}", .{e}); + return; + }; + if (vm.source_mappings.get(specifier)) |mappings| { + defer mappings.deref(); + const map_path = std.mem.concat(bun.default_allocator, u8, &.{ std.fs.path.basename(specifier), ".map" }) catch bun.outOfMemory(); + defer bun.default_allocator.free(map_path); + const file = try parent.createFile(map_path, .{}); + defer file.close(); + + const source_file = parent.readFileAlloc( + bun.default_allocator, + specifier, + std.math.maxInt(u64), + ) catch ""; + defer bun.default_allocator.free(source_file); + + var bufw = std.io.bufferedWriter(file.writer()); + const w = bufw.writer(); + try w.print( + \\{{ + \\ "version": 3, + \\ "file": {}, + \\ "sourceRoot": "", + \\ "sources": [{}], + \\ "sourcesContent": [{}], + \\ "names": [], + \\ "mappings": "{}" + \\}} + , .{ + bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier), .{}), + bun.fmt.formatJSONStringUTF8(specifier, .{}), + bun.fmt.formatJSONStringUTF8(source_file, .{}), + mappings.formatVLQs(), + }); + try bufw.flush(); + } + } else { + dir.writeFile(.{ + .sub_path = std.fs.path.basename(specifier), + .data = written, + }) catch return; + } +} + +fn setBreakPointOnFirstLine() bool { + const s = struct { + var set_break_point: bool = true; + }; + const ret = s.set_break_point; + s.set_break_point = false; + return ret; +} + +pub const RuntimeTranspilerStore = struct { + generation_number: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), + store: TranspilerJob.Store, + enabled: bool = true, + queue: Queue = Queue{}, + + pub const Queue = bun.UnboundedQueue(TranspilerJob, .next); + + pub fn init() RuntimeTranspilerStore { + return RuntimeTranspilerStore{ + .store = TranspilerJob.Store.init(bun.typedAllocator(TranspilerJob)), + }; + } + + // This is run at the top of the event loop on the JS thread. + pub fn drain(this: *RuntimeTranspilerStore) void { + var batch = this.queue.popBatch(); + var iter = batch.iterator(); + if (iter.next()) |job| { + // we run just one job first to see if there are more + job.runFromJSThread(); + } else { + return; + } + var vm: *VirtualMachine = @fieldParentPtr("transpiler_store", this); + const event_loop = vm.eventLoop(); + const global = vm.global; + const jsc_vm = vm.jsc; + while (iter.next()) |job| { + // if there are more, we need to drain the microtasks from the previous run + event_loop.drainMicrotasksWithGlobal(global, jsc_vm); + job.runFromJSThread(); + } + + // immediately after this is called, the microtasks will be drained again. 
+ } + + pub fn transpile( + this: *RuntimeTranspilerStore, + vm: *VirtualMachine, + globalObject: *JSGlobalObject, + input_specifier: bun.String, + path: Fs.Path, + referrer: bun.String, + loader: bun.options.Loader, + package_json: ?*const PackageJSON, + ) *anyopaque { + var job: *TranspilerJob = this.store.get(); + const owned_path = Fs.Path.init(bun.default_allocator.dupe(u8, path.text) catch unreachable); + const promise = JSC.JSInternalPromise.create(globalObject); + + // NOTE: DirInfo should already be cached since module loading happens + // after module resolution, so this should be cheap + var resolved_source = ResolvedSource{}; + if (package_json) |pkg| { + switch (pkg.module_type) { + .cjs => { + resolved_source.tag = .package_json_type_commonjs; + resolved_source.is_commonjs_module = true; + }, + .esm => resolved_source.tag = .package_json_type_module, + .unknown => {}, + } + } + + job.* = TranspilerJob{ + .non_threadsafe_input_specifier = input_specifier, + .path = owned_path, + .globalThis = globalObject, + .non_threadsafe_referrer = referrer, + .vm = vm, + .log = logger.Log.init(bun.default_allocator), + .loader = loader, + .promise = JSC.Strong.create(JSValue.fromCell(promise), globalObject), + .poll_ref = .{}, + .fetcher = TranspilerJob.Fetcher{ + .file = {}, + }, + .resolved_source = resolved_source, + }; + if (comptime Environment.allow_assert) + debug("transpile({s}, {s}, async)", .{ path.text, @tagName(job.loader) }); + job.schedule(); + return promise; + } + + pub const TranspilerJob = struct { + path: Fs.Path, + non_threadsafe_input_specifier: String, + non_threadsafe_referrer: String, + loader: options.Loader, + promise: JSC.Strong = .empty, + vm: *VirtualMachine, + globalThis: *JSGlobalObject, + fetcher: Fetcher, + poll_ref: Async.KeepAlive = .{}, + generation_number: u32 = 0, + log: logger.Log, + parse_error: ?anyerror = null, + resolved_source: ResolvedSource = ResolvedSource{}, + work_task: JSC.WorkPoolTask = .{ .callback = runFromWorkerThread }, + next: ?*TranspilerJob = null, + + pub const Store = bun.HiveArray(TranspilerJob, if (bun.heap_breakdown.enabled) 0 else 64).Fallback; + + pub const Fetcher = union(enum) { + virtual_module: bun.String, + file: void, + + pub fn deinit(this: *@This()) void { + if (this.* == .virtual_module) { + this.virtual_module.deref(); + } + } + }; + + pub fn deinit(this: *TranspilerJob) void { + bun.default_allocator.free(this.path.text); + + this.poll_ref.disable(); + this.fetcher.deinit(); + this.loader = options.Loader.file; + this.non_threadsafe_input_specifier.deref(); + this.non_threadsafe_referrer.deref(); + this.path = Fs.Path.empty; + this.log.deinit(); + this.promise.deinit(); + this.globalThis = undefined; + } + + threadlocal var ast_memory_store: ?*js_ast.ASTMemoryAllocator = null; + threadlocal var source_code_printer: ?*js_printer.BufferPrinter = null; + + pub fn dispatchToMainThread(this: *TranspilerJob) void { + this.vm.transpiler_store.queue.push(this); + this.vm.eventLoop().enqueueTaskConcurrent(JSC.ConcurrentTask.createFrom(&this.vm.transpiler_store)); + } + + pub fn runFromJSThread(this: *TranspilerJob) void { + var vm = this.vm; + const promise = this.promise.swap(); + const globalThis = this.globalThis; + this.poll_ref.unref(vm); + + const referrer = this.non_threadsafe_referrer; + this.non_threadsafe_referrer = String.empty; + var log = this.log; + this.log = logger.Log.init(bun.default_allocator); + var resolved_source = this.resolved_source; + const specifier = brk: { + if (this.parse_error != null) { + 
break :brk bun.String.createUTF8(this.path.text); + } + + const out = this.non_threadsafe_input_specifier; + this.non_threadsafe_input_specifier = String.empty; + + bun.debugAssert(resolved_source.source_url.isEmpty()); + bun.debugAssert(resolved_source.specifier.isEmpty()); + resolved_source.source_url = out.createIfDifferent(this.path.text); + resolved_source.specifier = out.dupeRef(); + break :brk out; + }; + + const parse_error = this.parse_error; + this.promise.deinit(); + this.deinit(); + + _ = vm.transpiler_store.store.put(this); + + ModuleLoader.AsyncModule.fulfill(globalThis, promise, resolved_source, parse_error, specifier, referrer, &log); + } + + pub fn schedule(this: *TranspilerJob) void { + this.poll_ref.ref(this.vm); + JSC.WorkPool.schedule(&this.work_task); + } + + pub fn runFromWorkerThread(work_task: *JSC.WorkPoolTask) void { + @as(*TranspilerJob, @fieldParentPtr("work_task", work_task)).run(); + } + + pub fn run(this: *TranspilerJob) void { + var arena = bun.ArenaAllocator.init(bun.default_allocator); + defer arena.deinit(); + const allocator = arena.allocator(); + + defer this.dispatchToMainThread(); + if (this.generation_number != this.vm.transpiler_store.generation_number.load(.monotonic)) { + this.parse_error = error.TranspilerJobGenerationMismatch; + return; + } + + if (ast_memory_store == null) { + ast_memory_store = bun.default_allocator.create(js_ast.ASTMemoryAllocator) catch bun.outOfMemory(); + ast_memory_store.?.* = js_ast.ASTMemoryAllocator{ + .allocator = allocator, + .previous = null, + }; + } + + ast_memory_store.?.allocator = allocator; + ast_memory_store.?.reset(); + ast_memory_store.?.push(); + + const path = this.path; + const specifier = this.path.text; + const loader = this.loader; + this.log = logger.Log.init(bun.default_allocator); + + var cache = JSC.RuntimeTranspilerCache{ + .output_code_allocator = allocator, + .sourcemap_allocator = bun.default_allocator, + }; + + var vm = this.vm; + var transpiler: bun.Transpiler = undefined; + transpiler = vm.transpiler; + transpiler.setAllocator(allocator); + transpiler.setLog(&this.log); + transpiler.resolver.opts = transpiler.options; + transpiler.macro_context = null; + transpiler.linker.resolver = &transpiler.resolver; + + var fd: ?StoredFileDescriptorType = null; + var package_json: ?*PackageJSON = null; + const hash = bun.Watcher.getHash(path.text); + + switch (vm.bun_watcher) { + .hot, .watch => { + if (vm.bun_watcher.indexOf(hash)) |index| { + const watcher_fd = vm.bun_watcher.watchlist().items(.fd)[index]; + fd = if (watcher_fd.stdioTag() == null) watcher_fd else null; + package_json = vm.bun_watcher.watchlist().items(.package_json)[index]; + } + }, + else => {}, + } + + // this should be a cheap lookup because 24 bytes == 8 * 3 so it's read 3 machine words + const is_node_override = strings.hasPrefixComptime(specifier, node_fallbacks.import_path); + + const macro_remappings = if (vm.macro_mode or !vm.has_any_macro_remappings or is_node_override) + MacroRemap{} + else + transpiler.options.macro_remap; + + var fallback_source: logger.Source = undefined; + + // Usually, we want to close the input file automatically. 
+ // + // If we're re-using the file descriptor from the fs watcher + // Do not close it because that will break the kqueue-based watcher + // + var should_close_input_file_fd = fd == null; + + var input_file_fd: StoredFileDescriptorType = .invalid; + + const is_main = vm.main.len == path.text.len and + vm.main_hash == hash and + strings.eqlLong(vm.main, path.text, false); + + const module_type: ModuleType = switch (this.resolved_source.tag) { + .package_json_type_commonjs => .cjs, + .package_json_type_module => .esm, + else => .unknown, + }; + + var parse_options = Transpiler.ParseOptions{ + .allocator = allocator, + .path = path, + .loader = loader, + .dirname_fd = .invalid, + .file_descriptor = fd, + .file_fd_ptr = &input_file_fd, + .file_hash = hash, + .macro_remappings = macro_remappings, + .jsx = transpiler.options.jsx, + .emit_decorator_metadata = transpiler.options.emit_decorator_metadata, + .virtual_source = null, + .dont_bundle_twice = true, + .allow_commonjs = true, + .inject_jest_globals = transpiler.options.rewrite_jest_for_tests, + .set_breakpoint_on_first_line = vm.debugger != null and + vm.debugger.?.set_breakpoint_on_first_line and + is_main and + setBreakPointOnFirstLine(), + .runtime_transpiler_cache = if (!JSC.RuntimeTranspilerCache.is_disabled) &cache else null, + .remove_cjs_module_wrapper = is_main and vm.module_loader.eval_source != null, + .module_type = module_type, + .allow_bytecode_cache = true, + }; + + defer { + if (should_close_input_file_fd and input_file_fd.isValid()) { + input_file_fd.close(); + input_file_fd = .invalid; + } + } + + if (is_node_override) { + if (node_fallbacks.contentsFromPath(specifier)) |code| { + const fallback_path = Fs.Path.initWithNamespace(specifier, "node"); + fallback_source = logger.Source{ .path = fallback_path, .contents = code }; + parse_options.virtual_source = &fallback_source; + } + } + + var parse_result: bun.transpiler.ParseResult = transpiler.parseMaybeReturnFileOnlyAllowSharedBuffer( + parse_options, + null, + false, + false, + ) orelse { + if (vm.isWatcherEnabled()) { + if (input_file_fd.isValid()) { + if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { + should_close_input_file_fd = false; + _ = vm.bun_watcher.addFile( + input_file_fd, + path.text, + hash, + loader, + .invalid, + package_json, + true, + ); + } + } + } + + this.parse_error = error.ParseError; + return; + }; + + if (vm.isWatcherEnabled()) { + if (input_file_fd.isValid()) { + if (!is_node_override and + std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) + { + should_close_input_file_fd = false; + _ = vm.bun_watcher.addFile( + input_file_fd, + path.text, + hash, + loader, + .invalid, + package_json, + true, + ); + } + } + } + + if (cache.entry) |*entry| { + vm.source_mappings.putMappings(parse_result.source, .{ + .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len }, + .allocator = bun.default_allocator, + }) catch {}; + + if (comptime Environment.dump_source) { + dumpSourceString(vm, specifier, entry.output_code.byteSlice()); + } + + this.resolved_source = ResolvedSource{ + .allocator = null, + .source_code = switch (entry.output_code) { + .string => entry.output_code.string, + .utf8 => brk: { + const result = bun.String.createUTF8(entry.output_code.utf8); + cache.output_code_allocator.free(entry.output_code.utf8); + entry.output_code.utf8 = ""; + break :brk result; + }, + }, + .is_commonjs_module = entry.metadata.module_type == .cjs, 
+ .tag = this.resolved_source.tag, + }; + + return; + } + + if (parse_result.already_bundled != .none) { + const bytecode_slice = parse_result.already_bundled.bytecodeSlice(); + this.resolved_source = ResolvedSource{ + .allocator = null, + .source_code = bun.String.createLatin1(parse_result.source.contents), + .already_bundled = true, + .bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null, + .bytecode_cache_size = bytecode_slice.len, + .is_commonjs_module = parse_result.already_bundled.isCommonJS(), + .tag = this.resolved_source.tag, + }; + this.resolved_source.source_code.ensureHash(); + return; + } + + for (parse_result.ast.import_records.slice()) |*import_record_| { + var import_record: *bun.ImportRecord = import_record_; + + if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record.path.text, transpiler.options.target)) |replacement| { + import_record.path.text = replacement.path; + import_record.tag = replacement.tag; + import_record.is_external_without_side_effects = true; + continue; + } + + if (transpiler.options.rewrite_jest_for_tests) { + if (strings.eqlComptime( + import_record.path.text, + "@jest/globals", + ) or strings.eqlComptime( + import_record.path.text, + "vitest", + )) { + import_record.path.namespace = "bun"; + import_record.tag = .bun_test; + import_record.path.text = "test"; + import_record.is_external_without_side_effects = true; + continue; + } + } + + if (strings.hasPrefixComptime(import_record.path.text, "bun:")) { + import_record.path = Fs.Path.init(import_record.path.text["bun:".len..]); + import_record.path.namespace = "bun"; + import_record.is_external_without_side_effects = true; + + if (strings.eqlComptime(import_record.path.text, "test")) { + import_record.tag = .bun_test; + } + } + } + + if (source_code_printer == null) { + const writer = js_printer.BufferWriter.init(bun.default_allocator); + source_code_printer = bun.default_allocator.create(js_printer.BufferPrinter) catch unreachable; + source_code_printer.?.* = js_printer.BufferPrinter.init(writer); + source_code_printer.?.ctx.append_null_byte = false; + } + + var printer = source_code_printer.?.*; + printer.ctx.reset(); + + { + var mapper = vm.sourceMapHandler(&printer); + defer source_code_printer.?.* = printer; + _ = transpiler.printWithSourceMap( + parse_result, + @TypeOf(&printer), + &printer, + .esm_ascii, + mapper.get(), + ) catch |err| { + this.parse_error = err; + return; + }; + } + + if (comptime Environment.dump_source) { + dumpSource(this.vm, specifier, &printer); + } + + const source_code = brk: { + const written = printer.ctx.getWritten(); + + const result = cache.output_code orelse bun.String.createLatin1(written); + + if (written.len > 1024 * 1024 * 2 or vm.smol) { + printer.ctx.buffer.deinit(); + source_code_printer.?.* = printer; + } + + // In a benchmarking loading @babel/standalone 100 times: + // + // After ensureHash: + // 354.00 ms 4.2% 354.00 ms WTF::StringImpl::hashSlowCase() const + // + // Before ensureHash: + // 506.00 ms 6.1% 506.00 ms WTF::StringImpl::hashSlowCase() const + // + result.ensureHash(); + + break :brk result; + }; + this.resolved_source = ResolvedSource{ + .allocator = null, + .source_code = source_code, + .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, + .tag = this.resolved_source.tag, + }; + } + }; +}; + +pub const FetchFlags = enum { + transpile, + print_source, + print_source_and_clone, + + pub fn disableTranspiling(this: FetchFlags) bool { + return this != .transpile; + } 
+}; + +const SavedSourceMap = JSC.SavedSourceMap; + +pub const HardcodedModule = enum { + bun, + @"abort-controller", + @"bun:ffi", + @"bun:jsc", + @"bun:main", + @"bun:test", // usually replaced by the transpiler but `await import("bun:" + "test")` has to work + @"bun:wrap", + @"bun:sqlite", + @"node:assert", + @"node:assert/strict", + @"node:async_hooks", + @"node:buffer", + @"node:child_process", + @"node:console", + @"node:constants", + @"node:crypto", + @"node:dns", + @"node:dns/promises", + @"node:domain", + @"node:events", + @"node:fs", + @"node:fs/promises", + @"node:http", + @"node:https", + @"node:module", + @"node:net", + @"node:os", + @"node:path", + @"node:path/posix", + @"node:path/win32", + @"node:perf_hooks", + @"node:process", + @"node:querystring", + @"node:readline", + @"node:readline/promises", + @"node:stream", + @"node:stream/consumers", + @"node:stream/promises", + @"node:stream/web", + @"node:string_decoder", + @"node:test", + @"node:timers", + @"node:timers/promises", + @"node:tls", + @"node:tty", + @"node:url", + @"node:util", + @"node:util/types", + @"node:vm", + @"node:wasi", + @"node:zlib", + @"node:worker_threads", + @"node:punycode", + undici, + ws, + @"isomorphic-fetch", + @"node-fetch", + vercel_fetch, + @"utf-8-validate", + @"node:v8", + @"node:trace_events", + @"node:repl", + @"node:inspector", + @"node:http2", + @"node:diagnostics_channel", + @"node:dgram", + @"node:cluster", + @"node:_stream_duplex", + @"node:_stream_passthrough", + @"node:_stream_readable", + @"node:_stream_transform", + @"node:_stream_wrap", + @"node:_stream_writable", + @"node:_tls_common", + /// This is gated behind '--expose-internals' + @"bun:internal-for-testing", + + /// The module loader first uses `Aliases` to get a single string during + /// resolution, then maps that single string to the actual module. + /// Do not include aliases here; Those go in `Aliases`. 
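+    /// For example, the `Alias` table below rewrites "sys" to "node:util",
+    /// and "node:util" is then looked up here to get `.@"node:util"`.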
+ pub const map = bun.ComptimeStringMap(HardcodedModule, [_]struct { []const u8, HardcodedModule }{ + // Bun + .{ "bun", .bun }, + .{ "bun:ffi", .@"bun:ffi" }, + .{ "bun:jsc", .@"bun:jsc" }, + .{ "bun:main", .@"bun:main" }, + .{ "bun:test", .@"bun:test" }, + .{ "bun:sqlite", .@"bun:sqlite" }, + .{ "bun:wrap", .@"bun:wrap" }, + .{ "bun:internal-for-testing", .@"bun:internal-for-testing" }, + // Node.js + .{ "node:assert", .@"node:assert" }, + .{ "node:assert/strict", .@"node:assert/strict" }, + .{ "node:async_hooks", .@"node:async_hooks" }, + .{ "node:buffer", .@"node:buffer" }, + .{ "node:child_process", .@"node:child_process" }, + .{ "node:cluster", .@"node:cluster" }, + .{ "node:console", .@"node:console" }, + .{ "node:constants", .@"node:constants" }, + .{ "node:crypto", .@"node:crypto" }, + .{ "node:dgram", .@"node:dgram" }, + .{ "node:diagnostics_channel", .@"node:diagnostics_channel" }, + .{ "node:dns", .@"node:dns" }, + .{ "node:dns/promises", .@"node:dns/promises" }, + .{ "node:domain", .@"node:domain" }, + .{ "node:events", .@"node:events" }, + .{ "node:fs", .@"node:fs" }, + .{ "node:fs/promises", .@"node:fs/promises" }, + .{ "node:http", .@"node:http" }, + .{ "node:http2", .@"node:http2" }, + .{ "node:https", .@"node:https" }, + .{ "node:inspector", .@"node:inspector" }, + .{ "node:module", .@"node:module" }, + .{ "node:net", .@"node:net" }, + .{ "node:readline", .@"node:readline" }, + .{ "node:test", .@"node:test" }, + .{ "node:os", .@"node:os" }, + .{ "node:path", .@"node:path" }, + .{ "node:path/posix", .@"node:path/posix" }, + .{ "node:path/win32", .@"node:path/win32" }, + .{ "node:perf_hooks", .@"node:perf_hooks" }, + .{ "node:process", .@"node:process" }, + .{ "node:punycode", .@"node:punycode" }, + .{ "node:querystring", .@"node:querystring" }, + .{ "node:readline", .@"node:readline" }, + .{ "node:readline/promises", .@"node:readline/promises" }, + .{ "node:repl", .@"node:repl" }, + .{ "node:stream", .@"node:stream" }, + .{ "node:stream/consumers", .@"node:stream/consumers" }, + .{ "node:stream/promises", .@"node:stream/promises" }, + .{ "node:stream/web", .@"node:stream/web" }, + .{ "node:string_decoder", .@"node:string_decoder" }, + .{ "node:timers", .@"node:timers" }, + .{ "node:timers/promises", .@"node:timers/promises" }, + .{ "node:tls", .@"node:tls" }, + .{ "node:trace_events", .@"node:trace_events" }, + .{ "node:tty", .@"node:tty" }, + .{ "node:url", .@"node:url" }, + .{ "node:util", .@"node:util" }, + .{ "node:util/types", .@"node:util/types" }, + .{ "node:v8", .@"node:v8" }, + .{ "node:vm", .@"node:vm" }, + .{ "node:wasi", .@"node:wasi" }, + .{ "node:worker_threads", .@"node:worker_threads" }, + .{ "node:zlib", .@"node:zlib" }, + .{ "node:_stream_duplex", .@"node:_stream_duplex" }, + .{ "node:_stream_passthrough", .@"node:_stream_passthrough" }, + .{ "node:_stream_readable", .@"node:_stream_readable" }, + .{ "node:_stream_transform", .@"node:_stream_transform" }, + .{ "node:_stream_wrap", .@"node:_stream_wrap" }, + .{ "node:_stream_writable", .@"node:_stream_writable" }, + .{ "node:_tls_common", .@"node:_tls_common" }, + + .{ "node-fetch", HardcodedModule.@"node-fetch" }, + .{ "isomorphic-fetch", HardcodedModule.@"isomorphic-fetch" }, + .{ "undici", HardcodedModule.undici }, + .{ "ws", HardcodedModule.ws }, + .{ "@vercel/fetch", HardcodedModule.vercel_fetch }, + .{ "utf-8-validate", HardcodedModule.@"utf-8-validate" }, + .{ "abort-controller", HardcodedModule.@"abort-controller" }, + }); + + /// Contains the list of built-in modules from the perspective of the 
module + /// loader. This logic is duplicated for `isBuiltinModule` and the like. + pub const Alias = struct { + path: [:0]const u8, + tag: ImportRecord.Tag = .builtin, + node_builtin: bool = false, + node_only_prefix: bool = false, + + fn nodeEntry(path: [:0]const u8) struct { string, Alias } { + return .{ + path, + .{ + .path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path, + .node_builtin = true, + }, + }; + } + fn nodeEntryOnlyPrefix(path: [:0]const u8) struct { string, Alias } { + return .{ + path, + .{ + .path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path, + .node_builtin = true, + .node_only_prefix = true, + }, + }; + } + fn entry(path: [:0]const u8) struct { string, Alias } { + return .{ path, .{ .path = path } }; + } + + // Applied to both --target=bun and --target=node + const common_alias_kvs = [_]struct { string, Alias }{ + nodeEntry("node:assert"), + nodeEntry("node:assert/strict"), + nodeEntry("node:async_hooks"), + nodeEntry("node:buffer"), + nodeEntry("node:child_process"), + nodeEntry("node:cluster"), + nodeEntry("node:console"), + nodeEntry("node:constants"), + nodeEntry("node:crypto"), + nodeEntry("node:dgram"), + nodeEntry("node:diagnostics_channel"), + nodeEntry("node:dns"), + nodeEntry("node:dns/promises"), + nodeEntry("node:domain"), + nodeEntry("node:events"), + nodeEntry("node:fs"), + nodeEntry("node:fs/promises"), + nodeEntry("node:http"), + nodeEntry("node:http2"), + nodeEntry("node:https"), + nodeEntry("node:inspector"), + nodeEntry("node:module"), + nodeEntry("node:net"), + nodeEntry("node:os"), + nodeEntry("node:path"), + nodeEntry("node:path/posix"), + nodeEntry("node:path/win32"), + nodeEntry("node:perf_hooks"), + nodeEntry("node:process"), + nodeEntry("node:punycode"), + nodeEntry("node:querystring"), + nodeEntry("node:readline"), + nodeEntry("node:readline/promises"), + nodeEntry("node:repl"), + nodeEntry("node:stream"), + nodeEntry("node:stream/consumers"), + nodeEntry("node:stream/promises"), + nodeEntry("node:stream/web"), + nodeEntry("node:string_decoder"), + nodeEntry("node:timers"), + nodeEntry("node:timers/promises"), + nodeEntry("node:tls"), + nodeEntry("node:trace_events"), + nodeEntry("node:tty"), + nodeEntry("node:url"), + nodeEntry("node:util"), + nodeEntry("node:util/types"), + nodeEntry("node:v8"), + nodeEntry("node:vm"), + nodeEntry("node:wasi"), + nodeEntry("node:worker_threads"), + nodeEntry("node:zlib"), + // New Node.js builtins only resolve from the prefixed one. 
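+            // e.g. "node:test" maps to the builtin, while bare "test" intentionally
+            // has no entry in this table.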
+ nodeEntryOnlyPrefix("node:test"), + + nodeEntry("assert"), + nodeEntry("assert/strict"), + nodeEntry("async_hooks"), + nodeEntry("buffer"), + nodeEntry("child_process"), + nodeEntry("cluster"), + nodeEntry("console"), + nodeEntry("constants"), + nodeEntry("crypto"), + nodeEntry("dgram"), + nodeEntry("diagnostics_channel"), + nodeEntry("dns"), + nodeEntry("dns/promises"), + nodeEntry("domain"), + nodeEntry("events"), + nodeEntry("fs"), + nodeEntry("fs/promises"), + nodeEntry("http"), + nodeEntry("http2"), + nodeEntry("https"), + nodeEntry("inspector"), + nodeEntry("module"), + nodeEntry("net"), + nodeEntry("os"), + nodeEntry("path"), + nodeEntry("path/posix"), + nodeEntry("path/win32"), + nodeEntry("perf_hooks"), + nodeEntry("process"), + nodeEntry("punycode"), + nodeEntry("querystring"), + nodeEntry("readline"), + nodeEntry("readline/promises"), + nodeEntry("repl"), + nodeEntry("stream"), + nodeEntry("stream/consumers"), + nodeEntry("stream/promises"), + nodeEntry("stream/web"), + nodeEntry("string_decoder"), + nodeEntry("timers"), + nodeEntry("timers/promises"), + nodeEntry("tls"), + nodeEntry("trace_events"), + nodeEntry("tty"), + nodeEntry("url"), + nodeEntry("util"), + nodeEntry("util/types"), + nodeEntry("v8"), + nodeEntry("vm"), + nodeEntry("wasi"), + nodeEntry("worker_threads"), + nodeEntry("zlib"), + + // sys is a deprecated alias for util + .{ "sys", .{ .path = "node:util", .node_builtin = true } }, + .{ "node:sys", .{ .path = "node:util", .node_builtin = true } }, + + // These are returned in builtinModules, but probably not many + // packages use them so we will just alias them. + .{ "node:_http_agent", .{ .path = "node:http", .node_builtin = true } }, + .{ "node:_http_client", .{ .path = "node:http", .node_builtin = true } }, + .{ "node:_http_common", .{ .path = "node:http", .node_builtin = true } }, + .{ "node:_http_incoming", .{ .path = "node:http", .node_builtin = true } }, + .{ "node:_http_outgoing", .{ .path = "node:http", .node_builtin = true } }, + .{ "node:_http_server", .{ .path = "node:http", .node_builtin = true } }, + .{ "node:_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } }, + .{ "node:_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } }, + .{ "node:_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } }, + .{ "node:_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = true } }, + .{ "node:_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } }, + .{ "node:_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } }, + .{ "node:_tls_wrap", .{ .path = "node:tls", .node_builtin = true } }, + .{ "node:_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } }, + .{ "_http_agent", .{ .path = "node:http", .node_builtin = true } }, + .{ "_http_client", .{ .path = "node:http", .node_builtin = true } }, + .{ "_http_common", .{ .path = "node:http", .node_builtin = true } }, + .{ "_http_incoming", .{ .path = "node:http", .node_builtin = true } }, + .{ "_http_outgoing", .{ .path = "node:http", .node_builtin = true } }, + .{ "_http_server", .{ .path = "node:http", .node_builtin = true } }, + .{ "_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } }, + .{ "_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } }, + .{ "_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } }, + .{ "_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = 
true } }, + .{ "_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } }, + .{ "_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } }, + .{ "_tls_wrap", .{ .path = "node:tls", .node_builtin = true } }, + .{ "_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } }, + }; + + const bun_extra_alias_kvs = [_]struct { string, Alias }{ + .{ "bun", .{ .path = "bun", .tag = .bun } }, + .{ "bun:test", .{ .path = "bun:test", .tag = .bun_test } }, + .{ "bun:ffi", .{ .path = "bun:ffi" } }, + .{ "bun:jsc", .{ .path = "bun:jsc" } }, + .{ "bun:sqlite", .{ .path = "bun:sqlite" } }, + .{ "bun:wrap", .{ .path = "bun:wrap" } }, + .{ "bun:internal-for-testing", .{ .path = "bun:internal-for-testing" } }, + .{ "ffi", .{ .path = "bun:ffi" } }, + + // inspector/promises is not implemented, it is an alias of inspector + .{ "node:inspector/promises", .{ .path = "node:inspector", .node_builtin = true } }, + .{ "inspector/promises", .{ .path = "node:inspector", .node_builtin = true } }, + + // Thirdparty packages we override + .{ "@vercel/fetch", .{ .path = "@vercel/fetch" } }, + .{ "isomorphic-fetch", .{ .path = "isomorphic-fetch" } }, + .{ "node-fetch", .{ .path = "node-fetch" } }, + .{ "undici", .{ .path = "undici" } }, + .{ "utf-8-validate", .{ .path = "utf-8-validate" } }, + .{ "ws", .{ .path = "ws" } }, + .{ "ws/lib/websocket", .{ .path = "ws" } }, + + // Polyfills we force to native + .{ "abort-controller", .{ .path = "abort-controller" } }, + .{ "abort-controller/polyfill", .{ .path = "abort-controller" } }, + + // To force Next.js to not use bundled dependencies. + .{ "next/dist/compiled/ws", .{ .path = "ws" } }, + .{ "next/dist/compiled/node-fetch", .{ .path = "node-fetch" } }, + .{ "next/dist/compiled/undici", .{ .path = "undici" } }, + }; + + const node_extra_alias_kvs = [_]struct { string, Alias }{ + nodeEntry("node:inspector/promises"), + nodeEntry("inspector/promises"), + }; + + const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ node_extra_alias_kvs); + const bun_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs); + + pub fn has(name: []const u8, target: options.Target) bool { + return get(name, target) != null; + } + + pub fn get(name: []const u8, target: options.Target) ?Alias { + if (target.isBun()) { + return bun_aliases.get(name); + } else if (target.isNode()) { + return node_aliases.get(name); + } + return null; + } + }; +}; + +/// Support embedded .node files +export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String) bool { + if (vm.standalone_module_graph == null) return false; + + const input_path = in_out_str.toUTF8(bun.default_allocator); + defer input_path.deinit(); + const result = ModuleLoader.resolveEmbeddedFile(vm, input_path.slice(), "node") orelse return false; + in_out_str.* = bun.String.createUTF8(result); + return true; +} + +export fn ModuleLoader__isBuiltin(data: [*]const u8, len: usize) bool { + const str = data[0..len]; + return HardcodedModule.Alias.bun_aliases.get(str) != null; +} + +const std = @import("std"); +const StaticExport = @import("./bindings/static_export.zig"); +const bun = @import("bun"); +const string = bun.string; +const Output = bun.Output; +const Global = bun.Global; +const Environment = bun.Environment; +const strings = bun.strings; +const MutableString = bun.MutableString; +const stringZ = bun.stringZ; +const StoredFileDescriptorType = bun.StoredFileDescriptorType; +const Arena = @import("../allocators/mimalloc_arena.zig").Arena; 
+ +const Allocator = std.mem.Allocator; +const IdentityContext = @import("../identity_context.zig").IdentityContext; +const Fs = @import("../fs.zig"); +const Resolver = @import("../resolver/resolver.zig"); +const ast = @import("../import_record.zig"); +const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint; +const ParseResult = bun.transpiler.ParseResult; +const logger = bun.logger; +const Api = @import("../api/schema.zig").Api; +const options = @import("../options.zig"); +const Transpiler = bun.Transpiler; +const PluginRunner = bun.transpiler.PluginRunner; +const js_printer = bun.js_printer; +const js_parser = bun.js_parser; +const js_ast = bun.JSAst; +const ImportKind = ast.ImportKind; +const Analytics = @import("../analytics/analytics_thread.zig"); +const ZigString = bun.JSC.ZigString; +const Runtime = @import("../runtime.zig"); +const Router = @import("./api/filesystem_router.zig"); +const ImportRecord = ast.ImportRecord; +const DotEnv = @import("../env_loader.zig"); +const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; +const MacroRemap = @import("../resolver/package_json.zig").MacroMap; +const JSC = bun.JSC; +const JSValue = bun.JSC.JSValue; +const node_module_module = @import("./bindings/NodeModuleModule.zig"); + +const JSGlobalObject = bun.JSC.JSGlobalObject; +const ConsoleObject = bun.JSC.ConsoleObject; +const ZigException = bun.JSC.ZigException; +const ZigStackTrace = bun.JSC.ZigStackTrace; +const ResolvedSource = bun.JSC.ResolvedSource; +const JSPromise = bun.JSC.JSPromise; +const JSModuleLoader = bun.JSC.JSModuleLoader; +const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation; +const ErrorableZigString = bun.JSC.ErrorableZigString; +const VM = bun.JSC.VM; +const JSFunction = bun.JSC.JSFunction; +const Config = @import("./config.zig"); +const URL = @import("../url.zig").URL; +const Bun = JSC.API.Bun; +const EventLoop = JSC.EventLoop; +const PendingResolution = @import("../resolver/resolver.zig").PendingResolution; +const ThreadSafeFunction = bun.api.napi.ThreadSafeFunction; +const PackageManager = @import("../install/install.zig").PackageManager; +const Install = @import("../install/install.zig"); +const VirtualMachine = bun.JSC.VirtualMachine; +const Dependency = @import("../install/dependency.zig"); +const Async = bun.Async; +const String = bun.String; +const ModuleType = options.ModuleType; + +const debug = Output.scoped(.ModuleLoader, true); +const panic = std.debug.panic; diff --git a/src/bun.js/ProcessAutoKiller.zig b/src/bun.js/ProcessAutoKiller.zig new file mode 100644 index 0000000000..a4f3259072 --- /dev/null +++ b/src/bun.js/ProcessAutoKiller.zig @@ -0,0 +1,72 @@ +const ProcessAutoKiller = @This(); +const log = bun.Output.scoped(.AutoKiller, true); +const bun = @import("bun"); +const std = @import("std"); + +processes: std.AutoArrayHashMapUnmanaged(*bun.spawn.Process, void) = .{}, +enabled: bool = false, +ever_enabled: bool = false, + +pub fn enable(this: *ProcessAutoKiller) void { + this.enabled = true; + this.ever_enabled = true; +} + +pub fn disable(this: *ProcessAutoKiller) void { + this.enabled = false; +} + +pub const Result = struct { + processes: u32 = 0, + + pub fn format(self: @This(), comptime _: []const u8, _: anytype, writer: anytype) !void { + switch (self.processes) { + 0 => {}, + 1 => { + try writer.writeAll("killed 1 dangling process"); + }, + else => { + try std.fmt.format(writer, "killed {d} dangling processes", .{self.processes}); + }, + } + } +}; + +pub fn kill(this: *ProcessAutoKiller) Result { + 
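// Sends the default kill signal to every tracked subprocess that has not
+    // already exited; the Result reports how many were signalled.
+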
return .{ + .processes = this.killProcesses(), + }; +} + +fn killProcesses(this: *ProcessAutoKiller) u32 { + var count: u32 = 0; + while (this.processes.pop()) |process| { + if (!process.key.hasExited()) { + log("process.kill {d}", .{process.key.pid}); + count += @as(u32, @intFromBool(process.key.kill(@intFromEnum(bun.SignalCode.default)) == .result)); + } + } + return count; +} + +pub fn clear(this: *ProcessAutoKiller) void { + if (this.processes.capacity() > 256) { + this.processes.clearAndFree(bun.default_allocator); + } + + this.processes.clearRetainingCapacity(); +} + +pub fn onSubprocessSpawn(this: *ProcessAutoKiller, process: *bun.spawn.Process) void { + if (this.enabled) + this.processes.put(bun.default_allocator, process, {}) catch {}; +} + +pub fn onSubprocessExit(this: *ProcessAutoKiller, process: *bun.spawn.Process) void { + if (this.ever_enabled) + _ = this.processes.swapRemove(process); +} + +pub fn deinit(this: *ProcessAutoKiller) void { + this.processes.deinit(bun.default_allocator); +} diff --git a/src/bun.js/ResolveMessage.zig b/src/bun.js/ResolveMessage.zig index 5d1ca78266..b98f5c2989 100644 --- a/src/bun.js/ResolveMessage.zig +++ b/src/bun.js/ResolveMessage.zig @@ -193,7 +193,7 @@ pub const ResolveMessage = struct { this: *ResolveMessage, globalThis: *JSC.JSGlobalObject, ) JSC.JSValue { - return JSC.BuildMessage.generatePositionObject(this.msg, globalThis); + return bun.api.BuildMessage.generatePositionObject(this.msg, globalThis); } pub fn getMessage( diff --git a/src/bun.js/RuntimeTranspilerCache.zig b/src/bun.js/RuntimeTranspilerCache.zig index 048a37fef2..33b0254c77 100644 --- a/src/bun.js/RuntimeTranspilerCache.zig +++ b/src/bun.js/RuntimeTranspilerCache.zig @@ -245,7 +245,7 @@ pub const RuntimeTranspilerCache = struct { } bun.assert(end_position == @as(i64, @intCast(sourcemap.len + output_bytes.len + Metadata.size))); - bun.C.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {}; + bun.sys.preallocate_file(tmpfile.fd.cast(), 0, @intCast(end_position)) catch {}; while (position < end_position) { const written = try bun.sys.pwritev(tmpfile.fd, vecs, position).unwrap(); if (written <= 0) { diff --git a/src/bun.js/SavedSourceMap.zig b/src/bun.js/SavedSourceMap.zig new file mode 100644 index 0000000000..b68eda6e5a --- /dev/null +++ b/src/bun.js/SavedSourceMap.zig @@ -0,0 +1,300 @@ +const SavedSourceMap = @This(); + +/// This is a pointer to the map located on the VirtualMachine struct +map: *HashTable, +mutex: bun.Mutex = .{}, + +pub const vlq_offset = 24; + +pub fn init(this: *SavedSourceMap, map: *HashTable) void { + this.* = .{ + .map = map, + .mutex = .{}, + }; + + this.map.lockPointers(); +} + +pub inline fn lock(map: *SavedSourceMap) void { + map.mutex.lock(); + map.map.unlockPointers(); +} + +pub inline fn unlock(map: *SavedSourceMap) void { + map.map.lockPointers(); + map.mutex.unlock(); +} + +// For the runtime, we store the number of mappings and how many bytes the final list is at the beginning of the array +// The first 8 bytes are the length of the array +// The second 8 bytes are the number of mappings +pub const SavedMappings = struct { + data: [*]u8, + + pub fn vlq(this: SavedMappings) []u8 { + return this.data[vlq_offset..this.len()]; + } + + pub inline fn len(this: SavedMappings) usize { + return @as(u64, @bitCast(this.data[0..8].*)); + } + + pub fn deinit(this: SavedMappings) void { + bun.default_allocator.free(this.data[0..this.len()]); + } + + pub fn toMapping(this: SavedMappings, allocator: Allocator, path: string) 
anyerror!ParsedSourceMap { + const result = SourceMap.Mapping.parse( + allocator, + this.data[vlq_offset..this.len()], + @as(usize, @bitCast(this.data[8..16].*)), + 1, + @as(usize, @bitCast(this.data[16..24].*)), + ); + switch (result) { + .fail => |fail| { + if (Output.enable_ansi_colors_stderr) { + try fail.toData(path).writeFormat( + Output.errorWriter(), + logger.Kind.warn, + false, + true, + ); + } else { + try fail.toData(path).writeFormat( + Output.errorWriter(), + logger.Kind.warn, + false, + false, + ); + } + + return fail.err; + }, + .success => |success| { + return success; + }, + } + } +}; + +/// ParsedSourceMap is the canonical form for sourcemaps, +/// +/// but `SavedMappings` and `SourceProviderMap` are much cheaper to construct. +/// In `fn get`, this value gets converted to ParsedSourceMap always +pub const Value = bun.TaggedPointerUnion(.{ + ParsedSourceMap, + SavedMappings, + SourceProviderMap, +}); + +pub const MissingSourceMapNoteInfo = struct { + pub var storage: bun.PathBuffer = undefined; + pub var path: ?[]const u8 = null; + pub var seen_invalid = false; + + pub fn print() void { + if (seen_invalid) return; + if (path) |note| { + Output.note("missing sourcemaps for {s}", .{note}); + Output.note("consider bundling with '--sourcemap' to get unminified traces", .{}); + } + } +}; + +pub fn putZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *anyopaque, path: []const u8) void { + const source_provider: *SourceProviderMap = @ptrCast(opaque_source_provider); + this.putValue(path, Value.init(source_provider)) catch bun.outOfMemory(); +} + +pub fn removeZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *anyopaque, path: []const u8) void { + this.lock(); + defer this.unlock(); + + const entry = this.map.getEntry(bun.hash(path)) orelse return; + const old_value = Value.from(entry.value_ptr.*); + if (old_value.get(SourceProviderMap)) |prov| { + if (@intFromPtr(prov) == @intFromPtr(opaque_source_provider)) { + // there is nothing to unref or deinit + this.map.removeByPtr(entry.key_ptr); + } + } else if (old_value.get(ParsedSourceMap)) |map| { + if (map.underlying_provider.provider()) |prov| { + if (@intFromPtr(prov) == @intFromPtr(opaque_source_provider)) { + this.map.removeByPtr(entry.key_ptr); + map.deref(); + } + } + } +} + +pub const HashTable = std.HashMap(u64, *anyopaque, bun.IdentityContext(u64), 80); + +pub fn onSourceMapChunk(this: *SavedSourceMap, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { + try this.putMappings(source, chunk.buffer); +} + +pub const SourceMapHandler = js_printer.SourceMapHandler.For(SavedSourceMap, onSourceMapChunk); + +pub fn deinit(this: *SavedSourceMap) void { + { + this.lock(); + defer this.unlock(); + + var iter = this.map.valueIterator(); + while (iter.next()) |val| { + var value = Value.from(val.*); + if (value.get(ParsedSourceMap)) |source_map| { + source_map.deref(); + } else if (value.get(SavedMappings)) |saved_mappings| { + var saved = SavedMappings{ .data = @as([*]u8, @ptrCast(saved_mappings)) }; + saved.deinit(); + } else if (value.get(SourceProviderMap)) |provider| { + _ = provider; // do nothing, we did not hold a ref to ZigSourceProvider + } + } + } + + this.map.unlockPointers(); + this.map.deinit(); +} + +pub fn putMappings(this: *SavedSourceMap, source: logger.Source, mappings: MutableString) !void { + try this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, mappings.list.items.ptr))); +} + +pub fn putValue(this: *SavedSourceMap, path: []const u8, value: Value) !void 
{ + this.lock(); + defer this.unlock(); + + const entry = try this.map.getOrPut(bun.hash(path)); + if (entry.found_existing) { + var old_value = Value.from(entry.value_ptr.*); + if (old_value.get(ParsedSourceMap)) |parsed_source_map| { + var source_map: *ParsedSourceMap = parsed_source_map; + source_map.deref(); + } else if (old_value.get(SavedMappings)) |saved_mappings| { + var saved = SavedMappings{ .data = @as([*]u8, @ptrCast(saved_mappings)) }; + saved.deinit(); + } else if (old_value.get(SourceProviderMap)) |provider| { + _ = provider; // do nothing, we did not hold a ref to ZigSourceProvider + } + } + entry.value_ptr.* = value.ptr(); +} + +/// You must call `sourcemap.map.deref()` or you will leak memory +fn getWithContent( + this: *SavedSourceMap, + path: string, + hint: SourceMap.ParseUrlResultHint, +) SourceMap.ParseUrl { + const hash = bun.hash(path); + + // This lock is for the hash table + this.lock(); + + // This mapping entry is only valid while the mutex is locked + const mapping = this.map.getEntry(hash) orelse { + this.unlock(); + return .{}; + }; + + switch (Value.from(mapping.value_ptr.*).tag()) { + @field(Value.Tag, @typeName(ParsedSourceMap)) => { + defer this.unlock(); + const map = Value.from(mapping.value_ptr.*).as(ParsedSourceMap); + map.ref(); + return .{ .map = map }; + }, + @field(Value.Tag, @typeName(SavedMappings)) => { + defer this.unlock(); + var saved = SavedMappings{ .data = @as([*]u8, @ptrCast(Value.from(mapping.value_ptr.*).as(ParsedSourceMap))) }; + defer saved.deinit(); + const result = bun.new(ParsedSourceMap, saved.toMapping(bun.default_allocator, path) catch { + _ = this.map.remove(mapping.key_ptr.*); + return .{}; + }); + mapping.value_ptr.* = Value.init(result).ptr(); + result.ref(); + + return .{ .map = result }; + }, + @field(Value.Tag, @typeName(SourceProviderMap)) => { + const ptr: *SourceProviderMap = Value.from(mapping.value_ptr.*).as(SourceProviderMap); + this.unlock(); + + // Do not lock the mutex while we're parsing JSON! + if (ptr.getSourceMap(path, .none, hint)) |parse| { + if (parse.map) |map| { + map.ref(); + // The mutex is not locked. We have to check the hash table again. + this.putValue(path, Value.init(map)) catch bun.outOfMemory(); + + return parse; + } + } + + this.lock(); + defer this.unlock(); + // does not have a valid source map. let's not try again + _ = this.map.remove(hash); + + // Store path for a user note. 
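+            // MissingSourceMapNoteInfo.print() later prints this path and suggests
+            // bundling with '--sourcemap'.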
+ const storage = MissingSourceMapNoteInfo.storage[0..path.len]; + @memcpy(storage, path); + MissingSourceMapNoteInfo.path = storage; + return .{}; + }, + else => { + if (Environment.allow_assert) { + @panic("Corrupt pointer tag"); + } + this.unlock(); + return .{}; + }, + } +} + +/// You must `deref()` the returned value or you will leak memory +pub fn get(this: *SavedSourceMap, path: string) ?*ParsedSourceMap { + return this.getWithContent(path, .mappings_only).map; +} + +pub fn resolveMapping( + this: *SavedSourceMap, + path: []const u8, + line: i32, + column: i32, + source_handling: SourceMap.SourceContentHandling, +) ?SourceMap.Mapping.Lookup { + const parse = this.getWithContent(path, switch (source_handling) { + .no_source_contents => .mappings_only, + .source_contents => .{ .all = .{ .line = line, .column = column } }, + }); + const map = parse.map orelse return null; + + const mapping = parse.mapping orelse + SourceMap.Mapping.find(map.mappings, line, column) orelse + return null; + + return .{ + .mapping = mapping, + .source_map = map, + .prefetched_source_code = parse.source_contents, + }; +} + +const bun = @import("bun"); +const SourceMap = bun.sourcemap; +const SourceProviderMap = SourceMap.SourceProviderMap; +const ParsedSourceMap = SourceMap.ParsedSourceMap; +const string = []const u8; +const logger = bun.logger; +const Environment = bun.Environment; +const MutableString = bun.MutableString; +const js_printer = bun.js_printer; +const Output = bun.Output; + +const std = @import("std"); +const Allocator = std.mem.Allocator; diff --git a/src/bun.js/VirtualMachine.zig b/src/bun.js/VirtualMachine.zig new file mode 100644 index 0000000000..aa4982d2bb --- /dev/null +++ b/src/bun.js/VirtualMachine.zig @@ -0,0 +1,3569 @@ +//! This is the shared global state for a single JS instance execution. +//! +//! Today, Bun is one VM per thread, so the name "VirtualMachine" sort of makes +//! sense. If that changes, this should be renamed `ScriptExecutionContext`. +const VirtualMachine = @This(); +export var has_bun_garbage_collector_flag_enabled = false; +pub export var isBunTest: bool = false; + +// TODO: evaluate if this has any measurable performance impact. 
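+// Can be overridden via the BUN_FEATURE_FLAG_SYNTHETIC_MEMORY_LIMIT environment
+// variable (see loadExtraEnvAndSourceCodePrinter below).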
+pub var synthetic_allocation_limit: usize = std.math.maxInt(u32); +pub var string_allocation_limit: usize = std.math.maxInt(u32); + +comptime { + _ = Bun__remapStackFramePositions; + @export(&scriptExecutionStatus, .{ .name = "Bun__VM__scriptExecutionStatus" }); + @export(&setEntryPointEvalResultESM, .{ .name = "Bun__VM__setEntryPointEvalResultESM" }); + @export(&setEntryPointEvalResultCJS, .{ .name = "Bun__VM__setEntryPointEvalResultCJS" }); + @export(&specifierIsEvalEntryPoint, .{ .name = "Bun__VM__specifierIsEvalEntryPoint" }); + @export(&string_allocation_limit, .{ .name = "Bun__stringSyntheticAllocationLimit" }); +} + +global: *JSGlobalObject, +allocator: std.mem.Allocator, +has_loaded_constructors: bool = false, +transpiler: Transpiler, +bun_watcher: ImportWatcher = .{ .none = {} }, +console: *ConsoleObject, +log: *logger.Log, +main: []const u8 = "", +main_is_html_entrypoint: bool = false, +main_resolved_path: bun.String = bun.String.empty, +main_hash: u32 = 0, +entry_point: ServerEntryPoint = undefined, +origin: URL = URL{}, +node_fs: ?*bun.api.node.fs.NodeFS = null, +timer: bun.api.Timer.All, +event_loop_handle: ?*JSC.PlatformEventLoop = null, +pending_unref_counter: i32 = 0, +preload: []const []const u8 = &.{}, +unhandled_pending_rejection_to_capture: ?*JSValue = null, +standalone_module_graph: ?*bun.StandaloneModuleGraph = null, +smol: bool = false, +dns_result_order: DNSResolver.Order = .verbatim, + +hot_reload: bun.CLI.Command.HotReload = .none, +jsc: *VM = undefined, + +/// hide bun:wrap from stack traces +/// bun:wrap is very noisy +hide_bun_stackframes: bool = true, + +is_printing_plugin: bool = false, +is_shutting_down: bool = false, +plugin_runner: ?PluginRunner = null, +is_main_thread: bool = false, +last_reported_error_for_dedupe: JSValue = .zero, +exit_handler: ExitHandler = .{}, + +default_tls_reject_unauthorized: ?bool = null, +default_verbose_fetch: ?bun.http.HTTPVerboseLevel = null, + +/// Do not access this field directly! +/// +/// It exists in the VirtualMachine struct so that we don't accidentally +/// make a stack copy of it only use it through source_mappings. +/// +/// This proposal could let us safely move it back https://github.com/ziglang/zig/issues/7769 +saved_source_map_table: SavedSourceMap.HashTable = undefined, +source_mappings: SavedSourceMap = undefined, + +arena: *Arena = undefined, +has_loaded: bool = false, + +transpiled_count: usize = 0, +resolved_count: usize = 0, +had_errors: bool = false, + +macros: MacroMap, +macro_entry_points: std.AutoArrayHashMap(i32, *MacroEntryPoint), +macro_mode: bool = false, +no_macros: bool = false, +auto_killer: ProcessAutoKiller = .{ .enabled = false }, + +has_any_macro_remappings: bool = false, +is_from_devserver: bool = false, +has_enabled_macro_mode: bool = false, + +/// Used by bun:test to set global hooks for beforeAll, beforeEach, etc. 
+is_in_preload: bool = false, +has_patched_run_main: bool = false, + +transpiler_store: ModuleLoader.RuntimeTranspilerStore, + +after_event_loop_callback_ctx: ?*anyopaque = null, +after_event_loop_callback: ?JSC.OpaqueCallback = null, + +remap_stack_frames_mutex: bun.Mutex = .{}, + +/// The arguments used to launch the process _after_ the script name and bun and any flags applied to Bun +/// "bun run foo --bar" +/// ["--bar"] +/// "bun run foo baz --bar" +/// ["baz", "--bar"] +/// "bun run foo +/// [] +/// "bun foo --bar" +/// ["--bar"] +/// "bun foo baz --bar" +/// ["baz", "--bar"] +/// "bun foo +/// [] +argv: []const []const u8 = &[_][]const u8{}, + +origin_timer: std.time.Timer = undefined, +origin_timestamp: u64 = 0, +macro_event_loop: EventLoop = EventLoop{}, +regular_event_loop: EventLoop = EventLoop{}, +event_loop: *EventLoop = undefined, + +ref_strings: JSC.RefString.Map = undefined, +ref_strings_mutex: bun.Mutex = undefined, + +active_tasks: usize = 0, + +rare_data: ?*JSC.RareData = null, +is_us_loop_entered: bool = false, +pending_internal_promise: ?*JSInternalPromise = null, +entry_point_result: struct { + value: JSC.Strong = .empty, + cjs_set_value: bool = false, +} = .{}, + +auto_install_dependencies: bool = false, + +onUnhandledRejection: *const OnUnhandledRejection = defaultOnUnhandledRejection, +onUnhandledRejectionCtx: ?*anyopaque = null, +onUnhandledRejectionExceptionList: ?*ExceptionList = null, +unhandled_error_counter: usize = 0, +is_handling_uncaught_exception: bool = false, +exit_on_uncaught_exception: bool = false, + +modules: ModuleLoader.AsyncModule.Queue = .{}, +aggressive_garbage_collection: GCLevel = GCLevel.none, + +module_loader: ModuleLoader = .{}, + +gc_controller: JSC.GarbageCollectionController = .{}, +worker: ?*webcore.WebWorker = null, +ipc: ?IPCInstanceUnion = null, + +debugger: ?JSC.Debugger = null, +has_started_debugger: bool = false, +has_terminated: bool = false, + +debug_thread_id: if (Environment.allow_assert) std.Thread.Id else void, + +body_value_hive_allocator: webcore.Body.Value.HiveAllocator = undefined, + +is_inside_deferred_task_queue: bool = false, + +// defaults off. .on("message") will set it to true unless overridden +// process.channel.unref() will set it to false and mark it overridden +// on disconnect it will be disabled +channel_ref: bun.Async.KeepAlive = .{}, +// if process.channel.ref() or unref() has been called, this is set to true +channel_ref_overridden: bool = false, +// if one disconnect event listener should be ignored +channel_ref_should_ignore_one_disconnect_event_listener: bool = false, + +/// Whether this VM should be destroyed after it exits, even if it is the main thread's VM. +/// Worker VMs are always destroyed on exit, regardless of this setting. Setting this to +/// true may expose bugs that would otherwise only occur using Workers. Controlled by +/// Options.destruct_main_thread_on_exit. +destruct_main_thread_on_exit: bool, + +/// A set of extensions that exist in the require.extensions map. Keys +/// contain the leading '.'. Value is either a loader for built in +/// functions, or an index into JSCommonJSExtensions. +/// +/// `.keys() == transpiler.resolver.opts.extra_cjs_extensions`, so +/// mutations in this map must update the resolver. +commonjs_custom_extensions: bun.StringArrayHashMapUnmanaged(node_module_module.CustomLoader.Packed) = .empty, +/// Incremented when the `require.extensions` for a built-in extension is mutated. 
+/// An example is mutating `require.extensions['.js']` to intercept all '.js' files. +/// The value is decremented when defaults are restored. +has_mutated_built_in_extensions: u32 = 0, + +pub const ProcessAutoKiller = @import("ProcessAutoKiller.zig"); +pub const OnUnhandledRejection = fn (*VirtualMachine, globalObject: *JSGlobalObject, JSValue) void; + +pub const OnException = fn (*ZigException) void; + +pub fn initRequestBodyValue(this: *VirtualMachine, body: JSC.WebCore.Body.Value) !*Body.Value.HiveRef { + return .init(body, &this.body_value_hive_allocator); +} + +pub threadlocal var is_bundler_thread_for_bytecode_cache: bool = false; + +pub fn uwsLoop(this: *const VirtualMachine) *uws.Loop { + if (comptime Environment.isPosix) { + if (Environment.allow_assert) { + return this.event_loop_handle orelse @panic("uws event_loop_handle is null"); + } + return this.event_loop_handle.?; + } + + return uws.Loop.get(); +} + +pub fn uvLoop(this: *const VirtualMachine) *bun.Async.Loop { + if (Environment.allow_assert) { + return this.event_loop_handle orelse @panic("libuv event_loop_handle is null"); + } + return this.event_loop_handle.?; +} + +pub fn isMainThread(this: *const VirtualMachine) bool { + return this.worker == null; +} + +pub fn isInspectorEnabled(this: *const VirtualMachine) bool { + return this.debugger != null; +} + +pub fn isShuttingDown(this: *const VirtualMachine) bool { + return this.is_shutting_down; +} + +pub fn getTLSRejectUnauthorized(this: *const VirtualMachine) bool { + return this.default_tls_reject_unauthorized orelse this.transpiler.env.getTLSRejectUnauthorized(); +} + +pub fn onSubprocessSpawn(this: *VirtualMachine, process: *bun.spawn.Process) void { + this.auto_killer.onSubprocessSpawn(process); +} + +pub fn onSubprocessExit(this: *VirtualMachine, process: *bun.spawn.Process) void { + this.auto_killer.onSubprocessExit(process); +} + +pub fn getVerboseFetch(this: *VirtualMachine) bun.http.HTTPVerboseLevel { + return this.default_verbose_fetch orelse { + if (this.transpiler.env.get("BUN_CONFIG_VERBOSE_FETCH")) |verbose_fetch| { + if (strings.eqlComptime(verbose_fetch, "true") or strings.eqlComptime(verbose_fetch, "1")) { + this.default_verbose_fetch = .headers; + return .headers; + } else if (strings.eqlComptime(verbose_fetch, "curl")) { + this.default_verbose_fetch = .curl; + return .curl; + } + } + this.default_verbose_fetch = .none; + return .none; + }; +} + +pub const VMHolder = struct { + pub threadlocal var vm: ?*VirtualMachine = null; + pub threadlocal var cached_global_object: ?*JSGlobalObject = null; + pub var main_thread_vm: ?*VirtualMachine = null; + pub export fn Bun__setDefaultGlobalObject(global: *JSGlobalObject) void { + if (vm) |vm_instance| { + vm_instance.global = global; + + // Ensure this is always set when it should be. 
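+            // Only the main thread's VM is recorded as main_thread_vm; worker threads
+            // just refresh the thread-local cached_global_object below.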
+ if (vm_instance.is_main_thread) { + VMHolder.main_thread_vm = vm_instance; + } + } + + cached_global_object = global; + } + + pub export fn Bun__getDefaultGlobalObject() ?*JSGlobalObject { + return cached_global_object orelse { + if (vm) |vm_instance| { + cached_global_object = vm_instance.global; + } + return null; + }; + } + + pub export fn Bun__thisThreadHasVM() bool { + return vm != null; + } +}; + +pub inline fn get() *VirtualMachine { + return VMHolder.vm.?; +} + +pub fn getMainThreadVM() ?*VirtualMachine { + return VMHolder.main_thread_vm; +} + +pub fn mimeType(this: *VirtualMachine, str: []const u8) ?bun.http.MimeType { + return this.rareData().mimeTypeFromString(this.allocator, str); +} + +pub fn onAfterEventLoop(this: *VirtualMachine) void { + if (this.after_event_loop_callback) |cb| { + const ctx = this.after_event_loop_callback_ctx; + this.after_event_loop_callback = null; + this.after_event_loop_callback_ctx = null; + cb(ctx); + } +} + +pub fn isEventLoopAliveExcludingImmediates(vm: *const VirtualMachine) bool { + return vm.unhandled_error_counter == 0 and + (@intFromBool(vm.event_loop_handle.?.isActive()) + + vm.active_tasks + + vm.event_loop.tasks.count + + @intFromBool(vm.event_loop.hasPendingRefs()) > 0); +} + +pub fn isEventLoopAlive(vm: *const VirtualMachine) bool { + return vm.isEventLoopAliveExcludingImmediates() or + // We need to keep running in this case so that immediate tasks get run. But immediates + // intentionally don't make the event loop _active_ so we need to check for them + // separately. + vm.event_loop.immediate_tasks.items.len > 0 or + vm.event_loop.next_immediate_tasks.items.len > 0; +} + +pub fn wakeup(this: *VirtualMachine) void { + this.eventLoop().wakeup(); +} + +const SourceMapHandlerGetter = struct { + vm: *VirtualMachine, + printer: *js_printer.BufferPrinter, + + pub fn get(this: *SourceMapHandlerGetter) js_printer.SourceMapHandler { + if (this.vm.debugger == null or this.vm.debugger.?.mode == .connect) { + return SavedSourceMap.SourceMapHandler.init(&this.vm.source_mappings); + } + + return js_printer.SourceMapHandler.For(SourceMapHandlerGetter, onChunk).init(this); + } + + /// When the inspector is enabled, we want to generate an inline sourcemap. + /// And, for now, we also store it in source_mappings like normal + /// This is hideously expensive memory-wise... + pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { + var temp_json_buffer = bun.MutableString.initEmpty(bun.default_allocator); + defer temp_json_buffer.deinit(); + temp_json_buffer = try chunk.printSourceMapContentsAtOffset(source, temp_json_buffer, true, SavedSourceMap.vlq_offset, true); + const source_map_url_prefix_start = "//# sourceMappingURL=data:application/json;base64,"; + // TODO: do we need to %-encode the path? 
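+            // The appended footer has the shape:
+            //   "\n//# sourceMappingURL=data:application/json;base64,<encoded map>"
+            //   "\n//# sourceURL=<source.path.text>\n"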
+ const source_url_len = source.path.text.len; + const source_mapping_url = "\n//# sourceURL="; + const prefix_len = source_map_url_prefix_start.len + source_mapping_url.len + source_url_len; + + try this.vm.source_mappings.putMappings(source, chunk.buffer); + const encode_len = bun.base64.encodeLen(temp_json_buffer.list.items); + try this.printer.ctx.buffer.growIfNeeded(encode_len + prefix_len + 2); + this.printer.ctx.buffer.appendAssumeCapacity("\n" ++ source_map_url_prefix_start); + _ = bun.base64.encode(this.printer.ctx.buffer.list.items.ptr[this.printer.ctx.buffer.len()..this.printer.ctx.buffer.list.capacity], temp_json_buffer.list.items); + this.printer.ctx.buffer.list.items.len += encode_len; + this.printer.ctx.buffer.appendAssumeCapacity(source_mapping_url); + // TODO: do we need to %-encode the path? + this.printer.ctx.buffer.appendAssumeCapacity(source.path.text); + try this.printer.ctx.buffer.append("\n"); + } +}; + +pub inline fn sourceMapHandler(this: *VirtualMachine, printer: *js_printer.BufferPrinter) SourceMapHandlerGetter { + return SourceMapHandlerGetter{ + .vm = this, + .printer = printer, + }; +} + +pub const GCLevel = enum(u3) { + none = 0, + mild = 1, + aggressive = 2, +}; + +pub threadlocal var is_main_thread_vm: bool = false; + +pub const UnhandledRejectionScope = struct { + ctx: ?*anyopaque = null, + onUnhandledRejection: *const OnUnhandledRejection = undefined, + count: usize = 0, + + pub fn apply(this: *UnhandledRejectionScope, vm: *JSC.VirtualMachine) void { + vm.onUnhandledRejection = this.onUnhandledRejection; + vm.onUnhandledRejectionCtx = this.ctx; + vm.unhandled_error_counter = this.count; + } +}; + +pub fn onQuietUnhandledRejectionHandler(this: *VirtualMachine, _: *JSGlobalObject, _: JSValue) void { + this.unhandled_error_counter += 1; +} + +pub fn onQuietUnhandledRejectionHandlerCaptureValue(this: *VirtualMachine, _: *JSGlobalObject, value: JSValue) void { + this.unhandled_error_counter += 1; + value.ensureStillAlive(); + if (this.unhandled_pending_rejection_to_capture) |ptr| { + ptr.* = value; + } +} + +pub fn unhandledRejectionScope(this: *VirtualMachine) UnhandledRejectionScope { + return .{ + .onUnhandledRejection = this.onUnhandledRejection, + .ctx = this.onUnhandledRejectionCtx, + .count = this.unhandled_error_counter, + }; +} + +fn ensureSourceCodePrinter(this: *VirtualMachine) void { + if (source_code_printer == null) { + const allocator = if (bun.heap_breakdown.enabled) bun.heap_breakdown.namedAllocator("SourceCode") else this.allocator; + const writer = js_printer.BufferWriter.init(allocator); + source_code_printer = allocator.create(js_printer.BufferPrinter) catch unreachable; + source_code_printer.?.* = js_printer.BufferPrinter.init(writer); + source_code_printer.?.ctx.append_null_byte = false; + } +} + +pub fn loadExtraEnvAndSourceCodePrinter(this: *VirtualMachine) void { + var map = this.transpiler.env.map; + + ensureSourceCodePrinter(this); + + if (map.get("BUN_SHOW_BUN_STACKFRAMES") != null) { + this.hide_bun_stackframes = false; + } + + if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER")) { + this.transpiler_store.enabled = false; + } + + if (map.map.fetchSwapRemove("NODE_CHANNEL_FD")) |kv| { + const fd_s = kv.value.value; + const mode = if (map.map.fetchSwapRemove("NODE_CHANNEL_SERIALIZATION_MODE")) |mode_kv| + IPC.Mode.fromString(mode_kv.value.value) orelse .json + else + .json; + + IPC.log("IPC environment variables: NODE_CHANNEL_FD={s}, NODE_CHANNEL_SERIALIZATION_MODE={s}", .{ fd_s, @tagName(mode) }); + if 
(std.fmt.parseInt(u31, fd_s, 10)) |fd| { + this.initIPCInstance(.fromUV(fd), mode); + } else |_| { + Output.warn("Failed to parse IPC channel number '{s}'", .{fd_s}); + } + } + + // Node.js checks if this are set to "1" and no other value + if (map.get("NODE_PRESERVE_SYMLINKS")) |value| { + this.transpiler.resolver.opts.preserve_symlinks = bun.strings.eqlComptime(value, "1"); + } + + if (map.get("BUN_GARBAGE_COLLECTOR_LEVEL")) |gc_level| { + // Reuse this flag for other things to avoid unnecessary hashtable + // lookups on start for obscure flags which we do not want others to + // depend on. + if (map.get("BUN_FEATURE_FLAG_FORCE_WAITER_THREAD") != null) { + bun.spawn.process.WaiterThread.setShouldUseWaiterThread(); + } + + // Only allowed for testing + if (map.get("BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING") != null) { + ModuleLoader.is_allowed_to_use_internal_testing_apis = true; + } + + if (strings.eqlComptime(gc_level, "1")) { + this.aggressive_garbage_collection = .mild; + has_bun_garbage_collector_flag_enabled = true; + } else if (strings.eqlComptime(gc_level, "2")) { + this.aggressive_garbage_collection = .aggressive; + has_bun_garbage_collector_flag_enabled = true; + } + + if (map.get("BUN_FEATURE_FLAG_SYNTHETIC_MEMORY_LIMIT")) |value| { + if (std.fmt.parseInt(usize, value, 10)) |limit| { + synthetic_allocation_limit = limit; + string_allocation_limit = limit; + } else |_| { + Output.panic("BUN_FEATURE_FLAG_SYNTHETIC_MEMORY_LIMIT must be a positive integer", .{}); + } + } + } +} + +extern fn Bun__handleUncaughtException(*JSGlobalObject, err: JSValue, is_rejection: c_int) c_int; +extern fn Bun__handleUnhandledRejection(*JSGlobalObject, reason: JSValue, promise: JSValue) c_int; + +pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, reason: JSValue, promise: JSValue) bool { + if (this.isShuttingDown()) { + Output.debugWarn("unhandledRejection during shutdown.", .{}); + return true; + } + + if (isBunTest) { + this.unhandled_error_counter += 1; + this.onUnhandledRejection(this, globalObject, reason); + return true; + } + + const handled = Bun__handleUnhandledRejection(globalObject, reason, promise) > 0; + if (!handled) { + this.unhandled_error_counter += 1; + this.onUnhandledRejection(this, globalObject, reason); + } + return handled; +} + +pub fn uncaughtException(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, err: JSValue, is_rejection: bool) bool { + if (this.isShuttingDown()) { + Output.debugWarn("uncaughtException during shutdown.", .{}); + return true; + } + + if (isBunTest) { + this.unhandled_error_counter += 1; + this.onUnhandledRejection(this, globalObject, err); + return true; + } + + if (this.is_handling_uncaught_exception) { + this.runErrorHandler(err, null); + bun.api.node.process.exit(globalObject, 7); + @panic("Uncaught exception while handling uncaught exception"); + } + if (this.exit_on_uncaught_exception) { + this.runErrorHandler(err, null); + bun.api.node.process.exit(globalObject, 1); + @panic("made it past Bun__Process__exit"); + } + this.is_handling_uncaught_exception = true; + defer this.is_handling_uncaught_exception = false; + const handled = Bun__handleUncaughtException(globalObject, err.toError() orelse err, if (is_rejection) 1 else 0) > 0; + if (!handled) { + // TODO maybe we want a separate code path for uncaught exceptions + this.unhandled_error_counter += 1; + this.exit_handler.exit_code = 1; + this.onUnhandledRejection(this, globalObject, err); + } + return handled; +} + +pub fn 
handlePendingInternalPromiseRejection(this: *JSC.VirtualMachine) void { + var promise = this.pending_internal_promise.?; + if (promise.status(this.global.vm()) == .rejected and !promise.isHandled(this.global.vm())) { + _ = this.unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue()); + promise.setHandled(this.global.vm()); + } +} + +pub fn defaultOnUnhandledRejection(this: *JSC.VirtualMachine, _: *JSGlobalObject, value: JSValue) void { + this.runErrorHandler(value, this.onUnhandledRejectionExceptionList); +} + +pub inline fn packageManager(this: *VirtualMachine) *PackageManager { + return this.transpiler.getPackageManager(); +} + +pub fn garbageCollect(this: *const VirtualMachine, sync: bool) usize { + @branchHint(.cold); + Global.mimalloc_cleanup(false); + if (sync) + return this.global.vm().runGC(true); + + this.global.vm().collectAsync(); + return this.global.vm().heapSize(); +} + +pub inline fn autoGarbageCollect(this: *const VirtualMachine) void { + if (this.aggressive_garbage_collection != .none) { + _ = this.garbageCollect(this.aggressive_garbage_collection == .aggressive); + } +} + +pub fn reload(this: *VirtualMachine, _: *HotReloader.Task) void { + Output.debug("Reloading...", .{}); + const should_clear_terminal = !this.transpiler.env.hasSetNoClearTerminalOnReload(!Output.enable_ansi_colors); + if (this.hot_reload == .watch) { + Output.flush(); + bun.reloadProcess( + bun.default_allocator, + should_clear_terminal, + false, + ); + } + + if (should_clear_terminal) { + Output.flush(); + Output.disableBuffering(); + Output.resetTerminalAll(); + Output.enableBuffering(); + } + + this.global.reload(); + this.pending_internal_promise = this.reloadEntryPoint(this.main) catch @panic("Failed to reload"); +} + +pub inline fn nodeFS(this: *VirtualMachine) *Node.fs.NodeFS { + return this.node_fs orelse brk: { + this.node_fs = bun.default_allocator.create(Node.fs.NodeFS) catch unreachable; + this.node_fs.?.* = Node.fs.NodeFS{ + // only used when standalone module graph is enabled + .vm = if (this.standalone_module_graph != null) this else null, + }; + break :brk this.node_fs.?; + }; +} + +pub inline fn rareData(this: *VirtualMachine) *JSC.RareData { + return this.rare_data orelse brk: { + this.rare_data = this.allocator.create(JSC.RareData) catch unreachable; + this.rare_data.?.* = .{}; + break :brk this.rare_data.?; + }; +} + +pub inline fn eventLoop(this: *VirtualMachine) *EventLoop { + return this.event_loop; +} + +pub fn prepareLoop(_: *VirtualMachine) void {} + +pub fn enterUWSLoop(this: *VirtualMachine) void { + var loop = this.event_loop_handle.?; + loop.run(); +} + +pub fn onBeforeExit(this: *VirtualMachine) void { + this.exit_handler.dispatchOnBeforeExit(); + var dispatch = false; + while (true) { + while (this.isEventLoopAlive()) : (dispatch = true) { + this.tick(); + this.eventLoop().autoTickActive(); + } + + if (dispatch) { + this.exit_handler.dispatchOnBeforeExit(); + dispatch = false; + + if (this.isEventLoopAlive()) continue; + } + + break; + } +} + +pub fn scriptExecutionStatus(this: *const VirtualMachine) callconv(.C) JSC.ScriptExecutionStatus { + if (this.is_shutting_down) { + return .stopped; + } + + if (this.worker) |worker| { + if (worker.hasRequestedTerminate()) { + return .stopped; + } + } + + return .running; +} + +pub fn specifierIsEvalEntryPoint(this: *VirtualMachine, specifier: JSValue) callconv(.C) bool { + if (this.module_loader.eval_source) |eval_source| { + var specifier_str = specifier.toBunString(this.global) catch @panic("unexpected 
exception"); + defer specifier_str.deref(); + return specifier_str.eqlUTF8(eval_source.path.text); + } + + return false; +} + +pub fn setEntryPointEvalResultESM(this: *VirtualMachine, result: JSValue) callconv(.C) void { + // allow esm evaluate to set value multiple times + if (!this.entry_point_result.cjs_set_value) { + this.entry_point_result.value.set(this.global, result); + } +} + +pub fn setEntryPointEvalResultCJS(this: *VirtualMachine, value: JSValue) callconv(.C) void { + if (!this.entry_point_result.value.has()) { + this.entry_point_result.value.set(this.global, value); + this.entry_point_result.cjs_set_value = true; + } +} + +pub fn onExit(this: *VirtualMachine) void { + this.exit_handler.dispatchOnExit(); + this.is_shutting_down = true; + + const rare_data = this.rare_data orelse return; + defer rare_data.cleanup_hooks.clearAndFree(bun.default_allocator); + // Make sure we run new cleanup hooks introduced by running cleanup hooks + while (rare_data.cleanup_hooks.items.len > 0) { + var hooks = rare_data.cleanup_hooks; + defer hooks.deinit(bun.default_allocator); + rare_data.cleanup_hooks = .{}; + for (hooks.items) |hook| { + hook.execute(); + } + } +} + +extern fn Zig__GlobalObject__destructOnExit(*JSGlobalObject) void; + +pub fn globalExit(this: *VirtualMachine) noreturn { + if (this.destruct_main_thread_on_exit and this.is_main_thread) { + Zig__GlobalObject__destructOnExit(this.global); + this.deinit(); + } + bun.Global.exit(this.exit_handler.exit_code); +} + +pub fn nextAsyncTaskID(this: *VirtualMachine) u64 { + var debugger: *JSC.Debugger = &(this.debugger orelse return 0); + debugger.next_debugger_id +%= 1; + return debugger.next_debugger_id; +} + +pub fn hotMap(this: *VirtualMachine) ?*JSC.RareData.HotMap { + if (this.hot_reload != .hot) { + return null; + } + + return this.rareData().hotMap(this.allocator); +} + +pub inline fn enqueueTask(this: *VirtualMachine, task: JSC.Task) void { + this.eventLoop().enqueueTask(task); +} + +pub inline fn enqueueImmediateTask(this: *VirtualMachine, task: *bun.api.Timer.ImmediateObject) void { + this.eventLoop().enqueueImmediateTask(task); +} + +pub inline fn enqueueTaskConcurrent(this: *VirtualMachine, task: *JSC.ConcurrentTask) void { + this.eventLoop().enqueueTaskConcurrent(task); +} + +pub fn tick(this: *VirtualMachine) void { + this.eventLoop().tick(); +} + +pub fn waitFor(this: *VirtualMachine, cond: *bool) void { + while (!cond.*) { + this.eventLoop().tick(); + + if (!cond.*) { + this.eventLoop().autoTick(); + } + } +} + +pub fn waitForPromise(this: *VirtualMachine, promise: JSC.AnyPromise) void { + this.eventLoop().waitForPromise(promise); +} + +pub fn waitForTasks(this: *VirtualMachine) void { + this.eventLoop().waitForTasks(); +} + +pub const MacroMap = std.AutoArrayHashMap(i32, JSC.C.JSObjectRef); + +pub fn enableMacroMode(this: *VirtualMachine) void { + JSC.markBinding(@src()); + + if (!this.has_enabled_macro_mode) { + this.has_enabled_macro_mode = true; + this.macro_event_loop.tasks = EventLoop.Queue.init(default_allocator); + this.macro_event_loop.tasks.ensureTotalCapacity(16) catch unreachable; + this.macro_event_loop.global = this.global; + this.macro_event_loop.virtual_machine = this; + this.macro_event_loop.concurrent_tasks = .{}; + ensureSourceCodePrinter(this); + } + + this.transpiler.options.target = .bun_macro; + this.transpiler.resolver.caches.fs.use_alternate_source_cache = true; + this.macro_mode = true; + this.event_loop = &this.macro_event_loop; + bun.Analytics.Features.macros += 1; + 
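// The async transpiler store is turned off while macros run;
+    // disableMacroMode() re-enables it.
+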
this.transpiler_store.enabled = false; +} + +pub fn disableMacroMode(this: *VirtualMachine) void { + this.transpiler.options.target = .bun; + this.transpiler.resolver.caches.fs.use_alternate_source_cache = false; + this.macro_mode = false; + this.event_loop = &this.regular_event_loop; + this.transpiler_store.enabled = true; +} + +pub fn isWatcherEnabled(this: *VirtualMachine) bool { + return this.bun_watcher != .none; +} + +/// Instead of storing timestamp as a i128, we store it as a u64. +/// We subtract the timestamp from Jan 1, 2000 (Y2K) +pub const origin_relative_epoch = 946684800 * std.time.ns_per_s; +fn getOriginTimestamp() u64 { + return @as( + u64, + @truncate(@as( + u128, + // handle if they set their system clock to be before epoch + @intCast(@max( + std.time.nanoTimestamp(), + origin_relative_epoch, + )), + ) - origin_relative_epoch), + ); +} + +pub inline fn isLoaded() bool { + return VMHolder.vm != null; +} +const RuntimeTranspilerStore = JSC.ModuleLoader.RuntimeTranspilerStore; +pub fn initWithModuleGraph( + opts: Options, +) !*VirtualMachine { + JSC.markBinding(@src()); + const allocator = opts.allocator; + VMHolder.vm = try allocator.create(VirtualMachine); + const console = try allocator.create(ConsoleObject); + console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); + const log = opts.log.?; + const transpiler = try Transpiler.init( + allocator, + log, + opts.args, + null, + ); + var vm = VMHolder.vm.?; + + vm.* = VirtualMachine{ + .global = undefined, + .transpiler_store = RuntimeTranspilerStore.init(), + .allocator = allocator, + .entry_point = ServerEntryPoint{}, + .transpiler = transpiler, + .console = console, + .log = log, + .timer = bun.api.Timer.All.init(), + .origin = transpiler.options.origin, + .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), + .source_mappings = undefined, + .macros = MacroMap.init(allocator), + .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), + .origin_timer = std.time.Timer.start() catch @panic("Timers are not supported on this system."), + .origin_timestamp = getOriginTimestamp(), + .ref_strings = JSC.RefString.Map.init(allocator), + .ref_strings_mutex = .{}, + .standalone_module_graph = opts.graph.?, + .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), + .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, + }; + vm.source_mappings.init(&vm.saved_source_map_table); + vm.regular_event_loop.tasks = EventLoop.Queue.init( + default_allocator, + ); + vm.regular_event_loop.virtual_machine = vm; + vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; + vm.regular_event_loop.concurrent_tasks = .{}; + vm.event_loop = &vm.regular_event_loop; + + vm.transpiler.macro_context = null; + vm.transpiler.resolver.store_fd = false; + vm.transpiler.resolver.prefer_module_field = false; + + vm.transpiler.resolver.onWakePackageManager = .{ + .context = &vm.modules, + .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, + .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, + }; + + vm.transpiler.resolver.standalone_module_graph = opts.graph.?; + + // Avoid reading from tsconfig.json & package.json when we're in standalone mode + vm.transpiler.configureLinkerWithAutoJSX(false); + + vm.transpiler.macro_context = js_ast.Macro.MacroContext.init(&vm.transpiler); + if (opts.is_main_thread) { + VMHolder.main_thread_vm = vm; + } + vm.global = JSGlobalObject.create( + vm, + vm.console, + if (opts.is_main_thread) 1 else 
std.math.maxInt(i32), + false, + false, + null, + ); + vm.regular_event_loop.global = vm.global; + vm.jsc = vm.global.vm(); + uws.Loop.get().internal_loop_data.jsc_vm = vm.jsc; + + vm.configureDebugger(opts.debugger); + vm.body_value_hive_allocator = Body.Value.HiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); + + return vm; +} + +export fn Bun__isMainThreadVM() callconv(.C) bool { + return get().is_main_thread; +} + +pub const Options = struct { + allocator: std.mem.Allocator, + args: Api.TransformOptions, + log: ?*logger.Log = null, + env_loader: ?*DotEnv.Loader = null, + store_fd: bool = false, + smol: bool = false, + dns_result_order: DNSResolver.Order = .verbatim, + + // --print needs the result from evaluating the main module + eval: bool = false, + + graph: ?*bun.StandaloneModuleGraph = null, + debugger: bun.CLI.Command.Debugger = .{ .unspecified = {} }, + is_main_thread: bool = false, + /// Whether this VM should be destroyed after it exits, even if it is the main thread's VM. + /// Worker VMs are always destroyed on exit, regardless of this setting. Setting this to + /// true may expose bugs that would otherwise only occur using Workers. + destruct_main_thread_on_exit: bool = false, +}; + +pub var is_smol_mode = false; + +pub fn init(opts: Options) !*VirtualMachine { + JSC.markBinding(@src()); + const allocator = opts.allocator; + var log: *logger.Log = undefined; + if (opts.log) |__log| { + log = __log; + } else { + log = try allocator.create(logger.Log); + log.* = logger.Log.init(allocator); + } + + VMHolder.vm = try allocator.create(VirtualMachine); + const console = try allocator.create(ConsoleObject); + console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); + const transpiler = try Transpiler.init( + allocator, + log, + try Config.configureTransformOptionsForBunVM(allocator, opts.args), + opts.env_loader, + ); + var vm = VMHolder.vm.?; + if (opts.is_main_thread) { + VMHolder.main_thread_vm = vm; + } + vm.* = VirtualMachine{ + .global = undefined, + .transpiler_store = RuntimeTranspilerStore.init(), + .allocator = allocator, + .entry_point = ServerEntryPoint{}, + .transpiler = transpiler, + .console = console, + .log = log, + + .timer = bun.api.Timer.All.init(), + + .origin = transpiler.options.origin, + + .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), + .source_mappings = undefined, + .macros = MacroMap.init(allocator), + .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), + .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), + .origin_timestamp = getOriginTimestamp(), + .ref_strings = JSC.RefString.Map.init(allocator), + .ref_strings_mutex = .{}, + .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), + .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, + }; + vm.source_mappings.init(&vm.saved_source_map_table); + vm.regular_event_loop.tasks = EventLoop.Queue.init( + default_allocator, + ); + + vm.regular_event_loop.virtual_machine = vm; + vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; + vm.regular_event_loop.concurrent_tasks = .{}; + vm.event_loop = &vm.regular_event_loop; + + vm.transpiler.macro_context = null; + vm.transpiler.resolver.store_fd = opts.store_fd; + vm.transpiler.resolver.prefer_module_field = false; + vm.transpiler.resolver.opts.preserve_symlinks = opts.args.preserve_symlinks orelse false; + + vm.transpiler.resolver.onWakePackageManager = .{ + .context = &vm.modules, + .handler = 
ModuleLoader.AsyncModule.Queue.onWakeHandler, + .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, + }; + + vm.transpiler.configureLinker(); + + vm.transpiler.macro_context = js_ast.Macro.MacroContext.init(&vm.transpiler); + + vm.global = JSGlobalObject.create( + vm, + vm.console, + if (opts.is_main_thread) 1 else std.math.maxInt(i32), + opts.smol, + opts.eval, + null, + ); + vm.regular_event_loop.global = vm.global; + vm.jsc = vm.global.vm(); + uws.Loop.get().internal_loop_data.jsc_vm = vm.jsc; + vm.smol = opts.smol; + vm.dns_result_order = opts.dns_result_order; + + if (opts.smol) + is_smol_mode = opts.smol; + + vm.configureDebugger(opts.debugger); + vm.body_value_hive_allocator = Body.Value.HiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); + + return vm; +} + +pub inline fn assertOnJSThread(vm: *const VirtualMachine) void { + if (Environment.allow_assert) { + if (vm.debug_thread_id != std.Thread.getCurrentId()) { + std.debug.panic("Expected to be on the JS thread.", .{}); + } + } +} + +fn configureDebugger(this: *VirtualMachine, cli_flag: bun.CLI.Command.Debugger) void { + if (bun.getenvZ("HYPERFINE_RANDOMIZED_ENVIRONMENT_OFFSET") != null) { + return; + } + + const unix = bun.getenvZ("BUN_INSPECT") orelse ""; + const connect_to = bun.getenvZ("BUN_INSPECT_CONNECT_TO") orelse ""; + + const set_breakpoint_on_first_line = unix.len > 0 and strings.endsWith(unix, "?break=1"); // If we should set a breakpoint on the first line + const wait_for_debugger = unix.len > 0 and strings.endsWith(unix, "?wait=1"); // If we should wait for the debugger to connect before starting the event loop + + const wait_for_connection: JSC.Debugger.Wait = if (set_breakpoint_on_first_line or wait_for_debugger) .forever else .off; + + switch (cli_flag) { + .unspecified => { + if (unix.len > 0) { + this.debugger = .{ + .path_or_port = null, + .from_environment_variable = unix, + .wait_for_connection = wait_for_connection, + .set_breakpoint_on_first_line = set_breakpoint_on_first_line, + }; + } else if (connect_to.len > 0) { + // This works in the vscode debug terminal because that relies on unix or notify being set, which they + // are in the debug terminal. 
This branch doesn't reach + this.debugger = .{ + .path_or_port = null, + .from_environment_variable = connect_to, + .wait_for_connection = .off, + .set_breakpoint_on_first_line = false, + .mode = .connect, + }; + } + }, + .enable => { + this.debugger = .{ + .path_or_port = cli_flag.enable.path_or_port, + .from_environment_variable = unix, + .wait_for_connection = if (cli_flag.enable.wait_for_connection) .forever else wait_for_connection, + .set_breakpoint_on_first_line = set_breakpoint_on_first_line or cli_flag.enable.set_breakpoint_on_first_line, + }; + }, + } + + if (this.isInspectorEnabled() and this.debugger.?.mode != .connect) { + this.transpiler.options.minify_identifiers = false; + this.transpiler.options.minify_syntax = false; + this.transpiler.options.minify_whitespace = false; + this.transpiler.options.debugger = true; + } +} + +pub fn initWorker( + worker: *webcore.WebWorker, + opts: Options, +) anyerror!*VirtualMachine { + JSC.markBinding(@src()); + var log: *logger.Log = undefined; + const allocator = opts.allocator; + if (opts.log) |__log| { + log = __log; + } else { + log = try allocator.create(logger.Log); + log.* = logger.Log.init(allocator); + } + + VMHolder.vm = try allocator.create(VirtualMachine); + const console = try allocator.create(ConsoleObject); + console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); + const transpiler = try Transpiler.init( + allocator, + log, + try Config.configureTransformOptionsForBunVM(allocator, opts.args), + opts.env_loader, + ); + var vm = VMHolder.vm.?; + + vm.* = VirtualMachine{ + .global = undefined, + .allocator = allocator, + .transpiler_store = RuntimeTranspilerStore.init(), + .entry_point = ServerEntryPoint{}, + .transpiler = transpiler, + .console = console, + .log = log, + + .timer = bun.api.Timer.All.init(), + .origin = transpiler.options.origin, + + .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), + .source_mappings = undefined, + .macros = MacroMap.init(allocator), + .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), + .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), + .origin_timestamp = getOriginTimestamp(), + .ref_strings = JSC.RefString.Map.init(allocator), + .ref_strings_mutex = .{}, + .standalone_module_graph = worker.parent.standalone_module_graph, + .worker = worker, + .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), + // This option is irrelevant for Workers + .destruct_main_thread_on_exit = false, + }; + vm.source_mappings.init(&vm.saved_source_map_table); + vm.regular_event_loop.tasks = EventLoop.Queue.init( + default_allocator, + ); + + vm.regular_event_loop.virtual_machine = vm; + vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; + vm.regular_event_loop.concurrent_tasks = .{}; + vm.event_loop = &vm.regular_event_loop; + vm.hot_reload = worker.parent.hot_reload; + vm.transpiler.macro_context = null; + vm.transpiler.resolver.store_fd = opts.store_fd; + vm.transpiler.resolver.prefer_module_field = false; + vm.transpiler.resolver.onWakePackageManager = .{ + .context = &vm.modules, + .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, + .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, + }; + vm.transpiler.resolver.standalone_module_graph = opts.graph; + + if (opts.graph == null) { + vm.transpiler.configureLinker(); + } else { + vm.transpiler.configureLinkerWithAutoJSX(false); + } + + vm.smol = opts.smol; + vm.transpiler.macro_context = 
js_ast.Macro.MacroContext.init(&vm.transpiler); + + vm.global = JSGlobalObject.create( + vm, + vm.console, + @as(i32, @intCast(worker.execution_context_id)), + worker.mini, + opts.eval, + worker.cpp_worker, + ); + vm.regular_event_loop.global = vm.global; + vm.jsc = vm.global.vm(); + uws.Loop.get().internal_loop_data.jsc_vm = vm.jsc; + vm.transpiler.setAllocator(allocator); + vm.body_value_hive_allocator = Body.Value.HiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); + + return vm; +} + +pub fn initBake(opts: Options) anyerror!*VirtualMachine { + JSC.markBinding(@src()); + const allocator = opts.allocator; + var log: *logger.Log = undefined; + if (opts.log) |__log| { + log = __log; + } else { + log = try allocator.create(logger.Log); + log.* = logger.Log.init(allocator); + } + + VMHolder.vm = try allocator.create(VirtualMachine); + const console = try allocator.create(ConsoleObject); + console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); + const transpiler = try Transpiler.init( + allocator, + log, + try Config.configureTransformOptionsForBunVM(allocator, opts.args), + opts.env_loader, + ); + var vm = VMHolder.vm.?; + + vm.* = VirtualMachine{ + .global = undefined, + .transpiler_store = RuntimeTranspilerStore.init(), + .allocator = allocator, + .entry_point = ServerEntryPoint{}, + .transpiler = transpiler, + .console = console, + .log = log, + .timer = bun.api.Timer.All.init(), + .origin = transpiler.options.origin, + .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), + .source_mappings = undefined, + .macros = MacroMap.init(allocator), + .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), + .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), + .origin_timestamp = getOriginTimestamp(), + .ref_strings = JSC.RefString.Map.init(allocator), + .ref_strings_mutex = .{}, + .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), + .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, + }; + vm.source_mappings.init(&vm.saved_source_map_table); + vm.regular_event_loop.tasks = EventLoop.Queue.init( + default_allocator, + ); + + vm.regular_event_loop.virtual_machine = vm; + vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; + vm.regular_event_loop.concurrent_tasks = .{}; + vm.event_loop = &vm.regular_event_loop; + vm.eventLoop().ensureWaker(); + + vm.transpiler.macro_context = null; + vm.transpiler.resolver.store_fd = opts.store_fd; + vm.transpiler.resolver.prefer_module_field = false; + + vm.transpiler.resolver.onWakePackageManager = .{ + .context = &vm.modules, + .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, + .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, + }; + + vm.transpiler.configureLinker(); + + vm.transpiler.macro_context = js_ast.Macro.MacroContext.init(&vm.transpiler); + + vm.smol = opts.smol; + + if (opts.smol) + is_smol_mode = opts.smol; + + vm.configureDebugger(opts.debugger); + vm.body_value_hive_allocator = Body.Value.HiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); + + return vm; +} + +pub threadlocal var source_code_printer: ?*js_printer.BufferPrinter = null; + +pub fn clearRefString(_: *anyopaque, ref_string: *JSC.RefString) void { + _ = VirtualMachine.get().ref_strings.remove(ref_string.hash); +} + +pub fn refCountedResolvedSource(this: *VirtualMachine, code: []const u8, specifier: bun.String, source_url: []const u8, hash_: ?u32, comptime add_double_ref: bool) 
ResolvedSource { + // refCountedString will panic if the code is empty + if (code.len == 0) { + return ResolvedSource{ + .source_code = bun.String.init(""), + .specifier = specifier, + .source_url = specifier.createIfDifferent(source_url), + .allocator = null, + .source_code_needs_deref = false, + }; + } + var source = this.refCountedString(code, hash_, !add_double_ref); + if (add_double_ref) { + source.ref(); + source.ref(); + } + + return ResolvedSource{ + .source_code = bun.String.init(source.impl), + .specifier = specifier, + .source_url = specifier.createIfDifferent(source_url), + .allocator = source, + .source_code_needs_deref = false, + }; +} + +fn refCountedStringWithWasNew(this: *VirtualMachine, new: *bool, input_: []const u8, hash_: ?u32, comptime dupe: bool) *JSC.RefString { + JSC.markBinding(@src()); + bun.assert(input_.len > 0); + const hash = hash_ orelse JSC.RefString.computeHash(input_); + this.ref_strings_mutex.lock(); + defer this.ref_strings_mutex.unlock(); + + const entry = this.ref_strings.getOrPut(hash) catch unreachable; + if (!entry.found_existing) { + const input = if (comptime dupe) + (this.allocator.dupe(u8, input_) catch unreachable) + else + input_; + + const ref = this.allocator.create(JSC.RefString) catch unreachable; + ref.* = JSC.RefString{ + .allocator = this.allocator, + .ptr = input.ptr, + .len = input.len, + .impl = bun.String.createExternal(*JSC.RefString, input, true, ref, &freeRefString).value.WTFStringImpl, + .hash = hash, + .ctx = this, + .onBeforeDeinit = VirtualMachine.clearRefString, + }; + entry.value_ptr.* = ref; + } + new.* = !entry.found_existing; + return entry.value_ptr.*; +} + +fn freeRefString(str: *JSC.RefString, _: *anyopaque, _: u32) callconv(.C) void { + str.deinit(); +} + +pub fn refCountedString(this: *VirtualMachine, input_: []const u8, hash_: ?u32, comptime dupe: bool) *JSC.RefString { + bun.assert(input_.len > 0); + var _was_new = false; + return this.refCountedStringWithWasNew(&_was_new, input_, hash_, comptime dupe); +} + +pub fn fetchWithoutOnLoadPlugins( + jsc_vm: *VirtualMachine, + globalObject: *JSGlobalObject, + _specifier: String, + referrer: String, + log: *logger.Log, + comptime flags: FetchFlags, +) anyerror!ResolvedSource { + bun.assert(VirtualMachine.isLoaded()); + + if (try ModuleLoader.fetchBuiltinModule(jsc_vm, _specifier)) |builtin| { + return builtin; + } + + const specifier_clone = _specifier.toUTF8(bun.default_allocator); + defer specifier_clone.deinit(); + const referrer_clone = referrer.toUTF8(bun.default_allocator); + defer referrer_clone.deinit(); + + var virtual_source_to_use: ?logger.Source = null; + var blob_to_deinit: ?JSC.WebCore.Blob = null; + defer if (blob_to_deinit) |*blob| blob.deinit(); + const lr = options.getLoaderAndVirtualSource(specifier_clone.slice(), jsc_vm, &virtual_source_to_use, &blob_to_deinit, null) catch { + return error.ModuleNotFound; + }; + const module_type: options.ModuleType = if (lr.package_json) |pkg| pkg.module_type else .unknown; + + // .print_source, which is used by exceptions avoids duplicating the entire source code + // but that means we have to be careful of the lifetime of the source code + // so we only want to reset the arena once its done freeing it. 
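// A minimal sketch (not part of this patch) of the defer/errdefer split used just below:
// when the caller keeps borrowing the transpiled source (the .print_source case), the arena
// is reset only on the error path; otherwise it is always reset on the way out.
// `transpileSketch`, `resetArena`, and `borrows_source` are hypothetical names for illustration.
const std = @import("std");

fn resetArena(arena: *std.heap.ArenaAllocator) void {
    _ = arena.reset(.retain_capacity);
}

fn transpileSketch(arena: *std.heap.ArenaAllocator, comptime borrows_source: bool) ![]const u8 {
    defer if (!borrows_source) resetArena(arena); // normal case: always free on the way out
    errdefer if (borrows_source) resetArena(arena); // borrowing case: free only when the call fails
    // hypothetical work that allocates from the arena and may fail
    return try arena.allocator().dupe(u8, "transpiled output");
}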
+ defer if (flags != .print_source) jsc_vm.module_loader.resetArena(jsc_vm); + errdefer if (flags == .print_source) jsc_vm.module_loader.resetArena(jsc_vm); + + return try ModuleLoader.transpileSourceCode( + jsc_vm, + lr.specifier, + referrer_clone.slice(), + _specifier, + lr.path, + lr.loader orelse if (lr.is_main) .js else .file, + module_type, + log, + lr.virtual_source, + null, + VirtualMachine.source_code_printer.?, + globalObject, + flags, + ); +} + +pub const ResolveFunctionResult = struct { + result: ?Resolver.Result, + path: string, + query_string: []const u8 = "", +}; + +fn normalizeSpecifierForResolution(specifier_: []const u8, query_string: *[]const u8) []const u8 { + var specifier = specifier_; + + if (strings.indexOfChar(specifier, '?')) |i| { + query_string.* = specifier[i..]; + specifier = specifier[0..i]; + } + + return specifier; +} + +threadlocal var specifier_cache_resolver_buf: bun.PathBuffer = undefined; +fn _resolve( + jsc_vm: *VirtualMachine, + ret: *ResolveFunctionResult, + specifier: string, + source: string, + is_esm: bool, + comptime is_a_file_path: bool, +) !void { + if (strings.eqlComptime(std.fs.path.basename(specifier), Runtime.Runtime.Imports.alt_name)) { + ret.path = Runtime.Runtime.Imports.Name; + return; + } else if (strings.eqlComptime(specifier, main_file_name)) { + ret.result = null; + ret.path = jsc_vm.entry_point.source.path.text; + return; + } else if (strings.hasPrefixComptime(specifier, js_ast.Macro.namespaceWithColon)) { + ret.result = null; + ret.path = specifier; + return; + } else if (strings.hasPrefixComptime(specifier, node_fallbacks.import_path)) { + ret.result = null; + ret.path = specifier; + return; + } else if (JSC.ModuleLoader.HardcodedModule.Alias.get(specifier, .bun)) |result| { + ret.result = null; + ret.path = result.path; + return; + } else if (jsc_vm.module_loader.eval_source != null and + (strings.endsWithComptime(specifier, bun.pathLiteral("/[eval]")) or + strings.endsWithComptime(specifier, bun.pathLiteral("/[stdin]")))) + { + ret.result = null; + ret.path = specifier; + return; + } else if (strings.hasPrefixComptime(specifier, "blob:")) { + ret.result = null; + if (JSC.WebCore.ObjectURLRegistry.singleton().has(specifier["blob:".len..])) { + ret.path = specifier; + return; + } else { + return error.ModuleNotFound; + } + } + + const is_special_source = strings.eqlComptime(source, main_file_name) or js_ast.Macro.isMacroPath(source); + var query_string: []const u8 = ""; + const normalized_specifier = normalizeSpecifierForResolution(specifier, &query_string); + const source_to_use = if (!is_special_source) + if (is_a_file_path) + Fs.PathName.init(source).dirWithTrailingSlash() + else + source + else + jsc_vm.transpiler.fs.top_level_dir; + + const result: Resolver.Result = try brk: { + // TODO: We only want to retry on not found only when the directories we searched for were cached. + // This fixes an issue where new files created in cached directories were not picked up. + // See https://github.com/oven-sh/bun/issues/3216 + // + // This cache-bust is disabled when the filesystem is not being used to resolve. 
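// A minimal sketch (not part of this patch) of the retry-once-after-cache-bust shape used
// below: a miss first invalidates the cached directory listing and retries; only a second
// miss is reported as not found. `DirCache`, `lookup`, and `invalidate` are hypothetical
// stand-ins for the resolver's directory cache.
const DirCache = struct {
    fn lookup(_: *DirCache, _: []const u8, _: []const u8) ?[]const u8 {
        return null; // pretend nothing is cached
    }
    fn invalidate(_: *DirCache, _: []const u8) bool {
        return false; // true only if an entry was actually cached, i.e. worth re-querying
    }
};

fn resolveWithRetry(cache: *DirCache, dir: []const u8, name: []const u8) error{ModuleNotFound}![]const u8 {
    var retry = true;
    while (true) {
        if (cache.lookup(dir, name)) |path| return path;
        if (!retry or !cache.invalidate(dir)) return error.ModuleNotFound;
        retry = false; // only one cache-busted retry
    }
}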
+ var retry_on_not_found = std.fs.path.isAbsolute(source_to_use); + while (true) { + break :brk switch (jsc_vm.transpiler.resolver.resolveAndAutoInstall( + source_to_use, + normalized_specifier, + if (is_esm) .stmt else .require, + if (jsc_vm.standalone_module_graph == null) .read_only else .disable, + )) { + .success => |r| r, + .failure => |e| e, + .pending, .not_found => if (!retry_on_not_found) + error.ModuleNotFound + else { + retry_on_not_found = false; + + const buster_name = name: { + if (std.fs.path.isAbsolute(normalized_specifier)) { + if (std.fs.path.dirname(normalized_specifier)) |dir| { + // Normalized without trailing slash + break :name bun.strings.normalizeSlashesOnly(&specifier_cache_resolver_buf, dir, std.fs.path.sep); + } + } + + var parts = [_]string{ + source_to_use, + normalized_specifier, + bun.pathLiteral(".."), + }; + + break :name bun.path.joinAbsStringBufZ( + jsc_vm.transpiler.fs.top_level_dir, + &specifier_cache_resolver_buf, + &parts, + .auto, + ); + }; + + // Only re-query if we previously had something cached. + if (jsc_vm.transpiler.resolver.bustDirCache(bun.strings.withoutTrailingSlashWindowsPath(buster_name))) { + continue; + } + + return error.ModuleNotFound; + }, + }; + } + }; + + if (!jsc_vm.macro_mode) { + jsc_vm.has_any_macro_remappings = jsc_vm.has_any_macro_remappings or jsc_vm.transpiler.options.macro_remap.count() > 0; + } + ret.result = result; + ret.query_string = query_string; + const result_path = result.pathConst() orelse return error.ModuleNotFound; + jsc_vm.resolved_count += 1; + + ret.path = result_path.text; +} + +pub fn resolve( + res: *ErrorableString, + global: *JSGlobalObject, + specifier: bun.String, + source: bun.String, + query_string: ?*ZigString, + is_esm: bool, +) !void { + try resolveMaybeNeedsTrailingSlash(res, global, specifier, source, query_string, is_esm, true, false); +} + +fn normalizeSource(source: []const u8) []const u8 { + if (strings.hasPrefixComptime(source, "file://")) { + return source["file://".len..]; + } + + return source; +} + +pub fn resolveMaybeNeedsTrailingSlash( + res: *ErrorableString, + global: *JSGlobalObject, + specifier: bun.String, + source: bun.String, + query_string: ?*ZigString, + is_esm: bool, + comptime is_a_file_path: bool, + is_user_require_resolve: bool, +) bun.JSError!void { + if (is_a_file_path and specifier.length() > comptime @as(u32, @intFromFloat(@trunc(@as(f64, @floatFromInt(bun.MAX_PATH_BYTES)) * 1.5)))) { + const specifier_utf8 = specifier.toUTF8(bun.default_allocator); + defer specifier_utf8.deinit(); + const source_utf8 = source.toUTF8(bun.default_allocator); + defer source_utf8.deinit(); + const printed = bun.api.ResolveMessage.fmt( + bun.default_allocator, + specifier_utf8.slice(), + source_utf8.slice(), + error.NameTooLong, + if (is_esm) .stmt else if (is_user_require_resolve) .require_resolve else .require, + ) catch bun.outOfMemory(); + const msg = logger.Msg{ + .data = logger.rangeData( + null, + logger.Range.None, + printed, + ), + }; + res.* = ErrorableString.err(error.NameTooLong, bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()).asVoid()); + return; + } + + var result = ResolveFunctionResult{ .path = "", .result = null }; + const jsc_vm = global.bunVM(); + const specifier_utf8 = specifier.toUTF8(bun.default_allocator); + defer specifier_utf8.deinit(); + + const source_utf8 = source.toUTF8(bun.default_allocator); + defer source_utf8.deinit(); + if (jsc_vm.plugin_runner) |plugin_runner| { + if 
(PluginRunner.couldBePlugin(specifier_utf8.slice())) { + const namespace = PluginRunner.extractNamespace(specifier_utf8.slice()); + const after_namespace = if (namespace.len == 0) + specifier_utf8.slice() + else + specifier_utf8.slice()[namespace.len + 1 .. specifier_utf8.len]; + + if (try plugin_runner.onResolveJSC(bun.String.init(namespace), bun.String.fromUTF8(after_namespace), source, .bun)) |resolved_path| { + res.* = resolved_path; + return; + } + } + } + + if (JSC.ModuleLoader.HardcodedModule.Alias.get(specifier_utf8.slice(), .bun)) |hardcoded| { + res.* = ErrorableString.ok( + if (is_user_require_resolve and hardcoded.node_builtin) + specifier + else + bun.String.init(hardcoded.path), + ); + return; + } + + const old_log = jsc_vm.log; + // the logger can end up being called on another thread, it must not use threadlocal Heap Allocator + var log = logger.Log.init(bun.default_allocator); + defer log.deinit(); + jsc_vm.log = &log; + jsc_vm.transpiler.resolver.log = &log; + jsc_vm.transpiler.linker.log = &log; + defer { + jsc_vm.log = old_log; + jsc_vm.transpiler.linker.log = old_log; + jsc_vm.transpiler.resolver.log = old_log; + } + jsc_vm._resolve(&result, specifier_utf8.slice(), normalizeSource(source_utf8.slice()), is_esm, is_a_file_path) catch |err_| { + var err = err_; + const msg: logger.Msg = brk: { + const msgs: []logger.Msg = log.msgs.items; + + for (msgs) |m| { + if (m.metadata == .resolve) { + err = m.metadata.resolve.err; + break :brk m; + } + } + + const import_kind: bun.ImportKind = if (is_esm) + .stmt + else if (is_user_require_resolve) + .require_resolve + else + .require; + + const printed = try bun.api.ResolveMessage.fmt( + jsc_vm.allocator, + specifier_utf8.slice(), + source_utf8.slice(), + err, + import_kind, + ); + break :brk logger.Msg{ + .data = logger.rangeData( + null, + logger.Range.None, + printed, + ), + .metadata = .{ + .resolve = .{ + .specifier = logger.BabyString.in(printed, specifier_utf8.slice()), + .import_kind = import_kind, + }, + }, + }; + }; + + { + res.* = ErrorableString.err(err, bun.api.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()).asVoid()); + } + + return; + }; + + if (query_string) |query| { + query.* = ZigString.init(result.query_string); + } + + res.* = ErrorableString.ok(bun.String.init(result.path)); +} + +pub const main_file_name: string = "bun:main"; + +pub export fn Bun__drainMicrotasksFromJS(globalObject: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { + _ = callframe; // autofix + globalObject.bunVM().drainMicrotasks(); + return .undefined; +} + +pub fn drainMicrotasks(this: *VirtualMachine) void { + this.eventLoop().drainMicrotasks(); +} + +pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, referrer: bun.String, log: *logger.Log, ret: *ErrorableResolvedSource, err: anyerror) void { + switch (log.msgs.items.len) { + 0 => { + const msg: logger.Msg = brk: { + if (err == error.UnexpectedPendingResolution) { + break :brk logger.Msg{ + .data = logger.rangeData( + null, + logger.Range.None, + std.fmt.allocPrint(globalThis.allocator(), "Unexpected pending import in \"{}\". To automatically install npm packages with Bun, please use an import statement instead of require() or dynamic import().\nThis error can also happen if dependencies import packages which are not referenced anywhere. 
Worst case, run `bun install` and opt-out of the node_modules folder until we come up with a better way to handle this error.", .{specifier}) catch unreachable, + ), + }; + } + + break :brk logger.Msg{ + .data = logger.rangeData(null, logger.Range.None, std.fmt.allocPrint(globalThis.allocator(), "{s} while building {}", .{ @errorName(err), specifier }) catch unreachable), + }; + }; + { + ret.* = ErrorableResolvedSource.err(err, bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg).asVoid()); + } + return; + }, + + 1 => { + const msg = log.msgs.items[0]; + ret.* = ErrorableResolvedSource.err(err, switch (msg.metadata) { + .build => bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg).asVoid(), + .resolve => bun.api.ResolveMessage.create( + globalThis, + globalThis.allocator(), + msg, + referrer.toUTF8(bun.default_allocator).slice(), + ).asVoid(), + }); + return; + }, + else => { + var errors_stack: [256]JSValue = undefined; + + const len = @min(log.msgs.items.len, errors_stack.len); + const errors = errors_stack[0..len]; + const logs = log.msgs.items[0..len]; + + for (logs, errors) |msg, *current| { + current.* = switch (msg.metadata) { + .build => bun.api.BuildMessage.create(globalThis, globalThis.allocator(), msg), + .resolve => bun.api.ResolveMessage.create( + globalThis, + globalThis.allocator(), + msg, + referrer.toUTF8(bun.default_allocator).slice(), + ), + }; + } + + ret.* = ErrorableResolvedSource.err( + err, + globalThis.createAggregateError( + errors, + &ZigString.init( + std.fmt.allocPrint(globalThis.allocator(), "{d} errors building \"{}\"", .{ + errors.len, + specifier, + }) catch unreachable, + ), + ).asVoid(), + ); + }, + } +} + +// TODO: +pub fn deinit(this: *VirtualMachine) void { + this.auto_killer.deinit(); + + if (source_code_printer) |print| { + print.getMutableBuffer().deinit(); + print.ctx.written = &.{}; + } + this.source_mappings.deinit(); + if (this.rare_data) |rare_data| { + rare_data.deinit(); + } + this.has_terminated = true; +} + +pub const ExceptionList = std.ArrayList(Api.JsException); + +pub fn printException( + this: *VirtualMachine, + exception: *Exception, + exception_list: ?*ExceptionList, + comptime Writer: type, + writer: Writer, + comptime allow_side_effects: bool, +) void { + var formatter = ConsoleObject.Formatter{ + .globalThis = this.global, + .quote_strings = false, + .single_line = false, + .stack_check = bun.StackCheck.init(), + }; + defer formatter.deinit(); + if (Output.enable_ansi_colors) { + this.printErrorlikeObject(exception.value(), exception, exception_list, &formatter, Writer, writer, true, allow_side_effects); + } else { + this.printErrorlikeObject(exception.value(), exception, exception_list, &formatter, Writer, writer, false, allow_side_effects); + } +} + +pub fn runErrorHandlerWithDedupe(this: *VirtualMachine, result: JSValue, exception_list: ?*ExceptionList) void { + if (this.last_reported_error_for_dedupe == result and !this.last_reported_error_for_dedupe.isEmptyOrUndefinedOrNull()) + return; + + this.runErrorHandler(result, exception_list); +} + +pub noinline fn runErrorHandler(this: *VirtualMachine, result: JSValue, exception_list: ?*ExceptionList) void { + @branchHint(.cold); + if (!result.isEmptyOrUndefinedOrNull()) + this.last_reported_error_for_dedupe = result; + + const prev_had_errors = this.had_errors; + this.had_errors = false; + defer this.had_errors = prev_had_errors; + + const error_writer = Output.errorWriter(); + var buffered_writer = std.io.bufferedWriter(error_writer); + defer { + 
buffered_writer.flush() catch {}; + } + + const writer = buffered_writer.writer(); + + if (result.isException(this.global.vm())) { + const exception = @as(*Exception, @ptrCast(result.asVoid())); + this.printException( + exception, + exception_list, + @TypeOf(writer), + writer, + true, + ); + } else { + var formatter = ConsoleObject.Formatter{ + .globalThis = this.global, + .quote_strings = false, + .single_line = false, + .stack_check = bun.StackCheck.init(), + .error_display_level = .full, + }; + defer formatter.deinit(); + switch (Output.enable_ansi_colors) { + inline else => |enable_colors| this.printErrorlikeObject(result, null, exception_list, &formatter, @TypeOf(writer), writer, enable_colors, true), + } + } +} + +export fn Bun__logUnhandledException(exception: JSValue) void { + get().runErrorHandler(exception, null); +} + +pub fn clearEntryPoint( + this: *VirtualMachine, +) void { + if (this.main.len == 0) { + return; + } + + var str = ZigString.init(main_file_name); + this.global.deleteModuleRegistryEntry(&str); +} + +fn loadPreloads(this: *VirtualMachine) !?*JSInternalPromise { + this.is_in_preload = true; + defer this.is_in_preload = false; + + for (this.preload) |preload| { + var result = switch (this.transpiler.resolver.resolveAndAutoInstall( + this.transpiler.fs.top_level_dir, + normalizeSource(preload), + .stmt, + if (this.standalone_module_graph == null) .read_only else .disable, + )) { + .success => |r| r, + .failure => |e| { + this.log.addErrorFmt( + null, + logger.Loc.Empty, + this.allocator, + "{s} resolving preload {}", + .{ + @errorName(e), + bun.fmt.formatJSONStringLatin1(preload), + }, + ) catch unreachable; + return e; + }, + .pending, .not_found => { + this.log.addErrorFmt( + null, + logger.Loc.Empty, + this.allocator, + "preload not found {}", + .{ + bun.fmt.formatJSONStringLatin1(preload), + }, + ) catch unreachable; + return error.ModuleNotFound; + }, + }; + var promise = JSModuleLoader.import(this.global, &String.fromBytes(result.path().?.text)); + + this.pending_internal_promise = promise; + JSValue.fromCell(promise).protect(); + defer JSValue.fromCell(promise).unprotect(); + + // pending_internal_promise can change if hot module reloading is enabled + if (this.isWatcherEnabled()) { + this.eventLoop().performGC(); + switch (this.pending_internal_promise.?.status(this.global.vm())) { + .pending => { + while (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { + this.eventLoop().tick(); + + if (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { + this.eventLoop().autoTick(); + } + } + }, + else => {}, + } + } else { + this.eventLoop().performGC(); + this.waitForPromise(JSC.AnyPromise{ + .internal = promise, + }); + } + + if (promise.status(this.global.vm()) == .rejected) + return promise; + } + + // only load preloads once + this.preload.len = 0; + + return null; +} + +pub fn ensureDebugger(this: *VirtualMachine, block_until_connected: bool) !void { + if (this.debugger != null) { + try JSC.Debugger.create(this, this.global); + + if (block_until_connected) { + JSC.Debugger.waitForDebuggerIfNecessary(this); + } + } +} + +extern fn Bun__loadHTMLEntryPoint(global: *JSGlobalObject) *JSInternalPromise; + +pub fn reloadEntryPoint(this: *VirtualMachine, entry_path: []const u8) !*JSInternalPromise { + this.has_loaded = false; + this.main = entry_path; + this.main_hash = Watcher.getHash(entry_path); + + try this.ensureDebugger(true); + + if (!this.main_is_html_entrypoint) { + try this.entry_point.generate( + this.allocator, + 
this.bun_watcher != .none, + entry_path, + main_file_name, + ); + } + + if (!this.transpiler.options.disable_transpilation) { + if (this.preload.len > 0) { + if (try this.loadPreloads()) |promise| { + JSValue.fromCell(promise).ensureStillAlive(); + JSValue.fromCell(promise).protect(); + this.pending_internal_promise = promise; + return promise; + } + + // Check if Module.runMain was patched + const prev = this.pending_internal_promise; + if (this.has_patched_run_main) { + @branchHint(.cold); + this.pending_internal_promise = null; + const ret = NodeModuleModule__callOverriddenRunMain(this.global, bun.String.createUTF8ForJS(this.global, main_file_name)); + if (this.pending_internal_promise == prev or this.pending_internal_promise == null) { + this.pending_internal_promise = JSInternalPromise.resolvedPromise(this.global, ret); + return this.pending_internal_promise.?; + } + return (this.pending_internal_promise orelse prev).?; + } + } + + const promise = if (!this.main_is_html_entrypoint) + JSModuleLoader.loadAndEvaluateModule(this.global, &String.init(main_file_name)) orelse return error.JSError + else + Bun__loadHTMLEntryPoint(this.global); + + this.pending_internal_promise = promise; + JSValue.fromCell(promise).ensureStillAlive(); + return promise; + } else { + const promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.fromBytes(this.main)) orelse return error.JSError; + this.pending_internal_promise = promise; + JSValue.fromCell(promise).ensureStillAlive(); + + return promise; + } +} + +extern "C" fn NodeModuleModule__callOverriddenRunMain(global: *JSGlobalObject, argv1: JSValue) JSValue; +export fn Bun__VirtualMachine__setOverrideModuleRunMain(vm: *VirtualMachine, is_patched: bool) void { + if (vm.is_in_preload) { + vm.has_patched_run_main = is_patched; + } +} +export fn Bun__VirtualMachine__setOverrideModuleRunMainPromise(vm: *VirtualMachine, promise: *JSInternalPromise) void { + if (vm.pending_internal_promise == null) { + vm.pending_internal_promise = promise; + } +} + +pub fn reloadEntryPointForTestRunner(this: *VirtualMachine, entry_path: []const u8) !*JSInternalPromise { + this.has_loaded = false; + this.main = entry_path; + this.main_hash = Watcher.getHash(entry_path); + + this.eventLoop().ensureWaker(); + + try this.ensureDebugger(true); + + if (!this.transpiler.options.disable_transpilation) { + if (try this.loadPreloads()) |promise| { + JSValue.fromCell(promise).ensureStillAlive(); + this.pending_internal_promise = promise; + JSValue.fromCell(promise).protect(); + + return promise; + } + } + + const promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.fromBytes(this.main)) orelse return error.JSError; + this.pending_internal_promise = promise; + JSValue.fromCell(promise).ensureStillAlive(); + + return promise; +} + +// Workers don't have a bun_watcher, and we don't want to call autoTick before dispatchOnline +pub fn loadEntryPointForWebWorker(this: *VirtualMachine, entry_path: string) anyerror!*JSInternalPromise { + const promise = try this.reloadEntryPoint(entry_path); + this.eventLoop().performGC(); + this.eventLoop().waitForPromiseWithTermination(JSC.AnyPromise{ + .internal = promise, + }); + if (this.worker) |worker| { + if (worker.hasRequestedTerminate()) { + return error.WorkerTerminated; + } + } + return this.pending_internal_promise.?; +} + +pub fn loadEntryPointForTestRunner(this: *VirtualMachine, entry_path: string) anyerror!*JSInternalPromise { + var promise = try this.reloadEntryPointForTestRunner(entry_path); + + // 
pending_internal_promise can change if hot module reloading is enabled + if (this.isWatcherEnabled()) { + this.eventLoop().performGC(); + switch (this.pending_internal_promise.?.status(this.global.vm())) { + .pending => { + while (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { + this.eventLoop().tick(); + + if (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { + this.eventLoop().autoTick(); + } + } + }, + else => {}, + } + } else { + if (promise.status(this.global.vm()) == .rejected) { + return promise; + } + + this.eventLoop().performGC(); + this.waitForPromise(.{ .internal = promise }); + } + + this.eventLoop().autoTick(); + + return this.pending_internal_promise.?; +} + +pub fn loadEntryPoint(this: *VirtualMachine, entry_path: string) anyerror!*JSInternalPromise { + var promise = try this.reloadEntryPoint(entry_path); + + // pending_internal_promise can change if hot module reloading is enabled + if (this.isWatcherEnabled()) { + this.eventLoop().performGC(); + switch (this.pending_internal_promise.?.status(this.global.vm())) { + .pending => { + while (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { + this.eventLoop().tick(); + + if (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { + this.eventLoop().autoTick(); + } + } + }, + else => {}, + } + } else { + if (promise.status(this.global.vm()) == .rejected) { + return promise; + } + + this.eventLoop().performGC(); + this.waitForPromise(.{ .internal = promise }); + } + + return this.pending_internal_promise.?; +} + +pub fn addListeningSocketForWatchMode(this: *VirtualMachine, socket: bun.FileDescriptor) void { + if (this.hot_reload != .watch) { + return; + } + + this.rareData().addListeningSocketForWatchMode(socket); +} +pub fn removeListeningSocketForWatchMode(this: *VirtualMachine, socket: bun.FileDescriptor) void { + if (this.hot_reload != .watch) { + return; + } + + this.rareData().removeListeningSocketForWatchMode(socket); +} + +pub fn loadMacroEntryPoint(this: *VirtualMachine, entry_path: string, function_name: string, specifier: string, hash: i32) !*JSInternalPromise { + const entry_point_entry = try this.macro_entry_points.getOrPut(hash); + + if (!entry_point_entry.found_existing) { + var macro_entry_pointer: *MacroEntryPoint = this.allocator.create(MacroEntryPoint) catch unreachable; + entry_point_entry.value_ptr.* = macro_entry_pointer; + try macro_entry_pointer.generate(&this.transpiler, Fs.PathName.init(entry_path), function_name, hash, specifier); + } + const entry_point = entry_point_entry.value_ptr.*; + + var loader = MacroEntryPointLoader{ + .path = entry_point.source.path.text, + }; + + this.runWithAPILock(MacroEntryPointLoader, &loader, MacroEntryPointLoader.load); + return loader.promise orelse return error.JSError; +} + +/// A subtlety of JavaScriptCore: +/// JavaScriptCore has many release asserts that check an API lock is currently held +/// We cannot hold it from Zig code because it relies on C++ RAII to automatically release the lock +/// and it is not safe to copy the lock itself +/// So we have to wrap entry points to & from JavaScript with an API lock that calls out to C++ +pub inline fn runWithAPILock(this: *VirtualMachine, comptime Context: type, ctx: *Context, comptime function: fn (ctx: *Context) void) void { + this.global.vm().holdAPILock(ctx, JSC.OpaqueWrap(Context, function)); +} + +const MacroEntryPointLoader = struct { + path: string, + promise: ?*JSInternalPromise = null, + pub fn load(this: 
*MacroEntryPointLoader) void { + this.promise = VirtualMachine.get()._loadMacroEntryPoint(this.path); + } +}; + +pub inline fn _loadMacroEntryPoint(this: *VirtualMachine, entry_path: string) ?*JSInternalPromise { + var promise: *JSInternalPromise = undefined; + + promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.init(entry_path)) orelse return null; + this.waitForPromise(JSC.AnyPromise{ + .internal = promise, + }); + + return promise; +} + +pub fn printErrorLikeObjectToConsole(this: *VirtualMachine, value: JSValue) void { + this.runErrorHandler(value, null); +} + +// When the Error-like object is one of our own, it's best to rely on the object directly instead of serializing it to a ZigException. +// This is for: +// - BuildMessage +// - ResolveMessage +// If there were multiple errors, it could be contained in an AggregateError. +// In that case, this function becomes recursive. +// In all other cases, we will convert it to a ZigException. +pub fn printErrorlikeObject( + this: *VirtualMachine, + value: JSValue, + exception: ?*Exception, + exception_list: ?*ExceptionList, + formatter: *ConsoleObject.Formatter, + comptime Writer: type, + writer: Writer, + comptime allow_ansi_color: bool, + comptime allow_side_effects: bool, +) void { + var was_internal = false; + + defer { + if (was_internal) { + if (exception) |exception_| { + var holder = ZigException.Holder.init(); + var zig_exception: *ZigException = holder.zigException(); + holder.deinit(this); + exception_.getStackTrace(this.global, &zig_exception.stack); + if (zig_exception.stack.frames_len > 0) { + if (allow_ansi_color) { + printStackTrace(Writer, writer, zig_exception.stack, true) catch {}; + } else { + printStackTrace(Writer, writer, zig_exception.stack, false) catch {}; + } + } + + if (exception_list) |list| { + zig_exception.addToErrorList(list, this.transpiler.fs.top_level_dir, &this.origin) catch {}; + } + } + } + } + + if (value.isAggregateError(this.global)) { + const AggregateErrorIterator = struct { + writer: Writer, + current_exception_list: ?*ExceptionList = null, + formatter: *ConsoleObject.Formatter, + + pub fn iteratorWithColor(vm: *VM, globalObject: *JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { + iterator(vm, globalObject, nextValue, ctx.?, true); + } + pub fn iteratorWithOutColor(vm: *VM, globalObject: *JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { + iterator(vm, globalObject, nextValue, ctx.?, false); + } + inline fn iterator(_: *VM, _: *JSGlobalObject, nextValue: JSValue, ctx: ?*anyopaque, comptime color: bool) void { + const this_ = @as(*@This(), @ptrFromInt(@intFromPtr(ctx))); + VirtualMachine.get().printErrorlikeObject(nextValue, null, this_.current_exception_list, this_.formatter, Writer, this_.writer, color, allow_side_effects); + } + }; + var iter = AggregateErrorIterator{ .writer = writer, .current_exception_list = exception_list, .formatter = formatter }; + if (comptime allow_ansi_color) { + value.getErrorsProperty(this.global).forEach(this.global, &iter, AggregateErrorIterator.iteratorWithColor); + } else { + value.getErrorsProperty(this.global).forEach(this.global, &iter, AggregateErrorIterator.iteratorWithOutColor); + } + return; + } + + was_internal = this.printErrorFromMaybePrivateData( + value, + exception_list, + formatter, + Writer, + writer, + allow_ansi_color, + allow_side_effects, + ); +} + +fn printErrorFromMaybePrivateData( + this: *VirtualMachine, + value: JSValue, + exception_list: ?*ExceptionList, + formatter: 
*ConsoleObject.Formatter, + comptime Writer: type, + writer: Writer, + comptime allow_ansi_color: bool, + comptime allow_side_effects: bool, +) bool { + if (value.jsType() == .DOMWrapper) { + if (value.as(bun.api.BuildMessage)) |build_error| { + defer Output.flush(); + if (!build_error.logged) { + if (this.had_errors) { + writer.writeAll("\n") catch {}; + } + build_error.msg.writeFormat(writer, allow_ansi_color) catch {}; + build_error.logged = true; + writer.writeAll("\n") catch {}; + } + this.had_errors = this.had_errors or build_error.msg.kind == .err; + if (exception_list != null) { + this.log.addMsg( + build_error.msg, + ) catch {}; + } + return true; + } else if (value.as(bun.api.ResolveMessage)) |resolve_error| { + defer Output.flush(); + if (!resolve_error.logged) { + if (this.had_errors) { + writer.writeAll("\n") catch {}; + } + resolve_error.msg.writeFormat(writer, allow_ansi_color) catch {}; + resolve_error.logged = true; + writer.writeAll("\n") catch {}; + } + + this.had_errors = this.had_errors or resolve_error.msg.kind == .err; + + if (exception_list != null) { + this.log.addMsg( + resolve_error.msg, + ) catch {}; + } + return true; + } + } + + this.printErrorInstance( + .js, + value, + exception_list, + formatter, + Writer, + writer, + allow_ansi_color, + allow_side_effects, + ) catch |err| { + if (err == error.JSError) { + this.global.clearException(); + } else if (comptime Environment.isDebug) { + // yo dawg + Output.printErrorln("Error while printing Error-like object: {s}", .{@errorName(err)}); + Output.flush(); + } + }; + + return false; +} + +pub fn reportUncaughtException(globalObject: *JSGlobalObject, exception: *Exception) JSValue { + var jsc_vm = globalObject.bunVM(); + _ = jsc_vm.uncaughtException(globalObject, exception.value(), false); + return .undefined; +} + +pub fn printStackTrace(comptime Writer: type, writer: Writer, trace: ZigStackTrace, comptime allow_ansi_colors: bool) !void { + const stack = trace.frames(); + if (stack.len > 0) { + var vm = VirtualMachine.get(); + const origin: ?*const URL = if (vm.is_from_devserver) &vm.origin else null; + const dir = vm.transpiler.fs.top_level_dir; + + for (stack) |frame| { + const file_slice = frame.source_url.toUTF8(bun.default_allocator); + defer file_slice.deinit(); + const func_slice = frame.function_name.toUTF8(bun.default_allocator); + defer func_slice.deinit(); + + const file = file_slice.slice(); + const func = func_slice.slice(); + + if (file.len == 0 and func.len == 0) continue; + + const has_name = std.fmt.count("{}", .{frame.nameFormatter(false)}) > 0; + + if (has_name and !frame.position.isInvalid()) { + try writer.print( + comptime Output.prettyFmt( + " at {} ({})\n", + allow_ansi_colors, + ), + .{ + frame.nameFormatter( + allow_ansi_colors, + ), + frame.sourceURLFormatter( + dir, + origin, + false, + allow_ansi_colors, + ), + }, + ); + } else if (!frame.position.isInvalid()) { + try writer.print( + comptime Output.prettyFmt( + " at {}\n", + allow_ansi_colors, + ), + .{ + frame.sourceURLFormatter( + dir, + origin, + false, + allow_ansi_colors, + ), + }, + ); + } else if (has_name) { + try writer.print( + comptime Output.prettyFmt( + " at {}\n", + allow_ansi_colors, + ), + .{ + frame.nameFormatter( + allow_ansi_colors, + ), + }, + ); + } else { + try writer.print( + comptime Output.prettyFmt( + " at {}\n", + allow_ansi_colors, + ), + .{ + frame.sourceURLFormatter( + dir, + origin, + false, + allow_ansi_colors, + ), + }, + ); + } + } + } +} + +pub export fn Bun__remapStackFramePositions(vm: 
*JSC.VirtualMachine, frames: [*]JSC.ZigStackFrame, frames_count: usize) void { + // **Warning** this method can be called in the heap collector thread!! + // https://github.com/oven-sh/bun/issues/17087 + vm.remapStackFramePositions(frames, frames_count); +} + +pub fn remapStackFramePositions(this: *VirtualMachine, frames: [*]JSC.ZigStackFrame, frames_count: usize) void { + for (frames[0..frames_count]) |*frame| { + if (frame.position.isInvalid() or frame.remapped) continue; + var sourceURL = frame.source_url.toUTF8(bun.default_allocator); + defer sourceURL.deinit(); + + // **Warning** this method can be called in the heap collector thread!! + // https://github.com/oven-sh/bun/issues/17087 + this.remap_stack_frames_mutex.lock(); + defer this.remap_stack_frames_mutex.unlock(); + + if (this.resolveSourceMapping( + sourceURL.slice(), + @max(frame.position.line.zeroBased(), 0), + @max(frame.position.column.zeroBased(), 0), + .no_source_contents, + )) |lookup| { + const source_map = lookup.source_map; + defer if (source_map) |map| map.deref(); + if (lookup.displaySourceURLIfNeeded(sourceURL.slice())) |source_url| { + frame.source_url.deref(); + frame.source_url = source_url; + } + const mapping = lookup.mapping; + frame.position.line = Ordinal.fromZeroBased(mapping.original.lines); + frame.position.column = Ordinal.fromZeroBased(mapping.original.columns); + frame.remapped = true; + } else { + // we don't want it to be remapped again + frame.remapped = true; + } + } +} + +pub fn remapZigException( + this: *VirtualMachine, + exception: *ZigException, + error_instance: JSValue, + exception_list: ?*ExceptionList, + must_reset_parser_arena_later: *bool, + source_code_slice: *?ZigString.Slice, + allow_source_code_preview: bool, +) void { + error_instance.toZigException(this.global, exception); + const enable_source_code_preview = allow_source_code_preview and + !(bun.getRuntimeFeatureFlag("BUN_DISABLE_SOURCE_CODE_PREVIEW") or + bun.getRuntimeFeatureFlag("BUN_DISABLE_TRANSPILED_SOURCE_CODE_PREVIEW")); + + defer { + if (Environment.isDebug) { + if (!enable_source_code_preview and source_code_slice.* != null) { + Output.panic("Do not collect source code when we don't need to", .{}); + } else if (!enable_source_code_preview and exception.stack.source_lines_numbers[0] != -1) { + Output.panic("Do not collect source code when we don't need to", .{}); + } + } + } + + // defer this so that it copies correctly + defer if (exception_list) |list| { + exception.addToErrorList(list, this.transpiler.fs.top_level_dir, &this.origin) catch unreachable; + }; + + const NoisyBuiltinFunctionMap = bun.ComptimeStringMap(void, .{ + .{"asyncModuleEvaluation"}, + .{"link"}, + .{"linkAndEvaluateModule"}, + .{"moduleEvaluation"}, + .{"processTicksAndRejections"}, + }); + + var frames: []JSC.ZigStackFrame = exception.stack.frames_ptr[0..exception.stack.frames_len]; + if (this.hide_bun_stackframes) { + var start_index: ?usize = null; + for (frames, 0..) 
|frame, i| { + if (frame.source_url.eqlComptime("bun:wrap") or + frame.function_name.eqlComptime("::bunternal::")) + { + start_index = i; + break; + } + + // Workaround for being unable to hide that specific frame without also hiding the frame before it + if (frame.source_url.isEmpty() and NoisyBuiltinFunctionMap.getWithEql(frame.function_name, String.eqlComptime) != null) { + start_index = 0; + break; + } + } + + if (start_index) |k| { + var j = k; + for (frames[k..]) |frame| { + if (frame.source_url.eqlComptime("bun:wrap") or + frame.function_name.eqlComptime("::bunternal::")) + { + continue; + } + + // Workaround for being unable to hide that specific frame without also hiding the frame before it + if (frame.source_url.isEmpty() and NoisyBuiltinFunctionMap.getWithEql(frame.function_name, String.eqlComptime) != null) { + continue; + } + + frames[j] = frame; + j += 1; + } + exception.stack.frames_len = @as(u8, @truncate(j)); + frames.len = j; + } + } + + if (frames.len == 0) return; + + var top = &frames[0]; + var top_frame_is_builtin = false; + if (this.hide_bun_stackframes) { + for (frames) |*frame| { + if (frame.source_url.hasPrefixComptime("bun:") or + frame.source_url.hasPrefixComptime("node:") or + frame.source_url.isEmpty() or + frame.source_url.eqlComptime("native") or + frame.source_url.eqlComptime("unknown")) + { + top_frame_is_builtin = true; + continue; + } + + top = frame; + top_frame_is_builtin = false; + break; + } + } + + var top_source_url = top.source_url.toUTF8(bun.default_allocator); + defer top_source_url.deinit(); + + const maybe_lookup = if (top.remapped) + SourceMap.Mapping.Lookup{ + .mapping = .{ + .generated = .{}, + .original = .{ + .lines = @max(top.position.line.zeroBased(), 0), + .columns = @max(top.position.column.zeroBased(), 0), + }, + .source_index = 0, + }, + .source_map = null, + .prefetched_source_code = null, + } + else + this.resolveSourceMapping( + top_source_url.slice(), + @max(top.position.line.zeroBased(), 0), + @max(top.position.column.zeroBased(), 0), + .source_contents, + ); + + if (maybe_lookup) |lookup| { + const mapping = lookup.mapping; + const source_map = lookup.source_map; + defer if (source_map) |map| map.deref(); + + if (!top.remapped) { + if (lookup.displaySourceURLIfNeeded(top_source_url.slice())) |src| { + top.source_url.deref(); + top.source_url = src; + } + } + + const code = code: { + if (!enable_source_code_preview) { + break :code ZigString.Slice.empty; + } + + if (!top.remapped and lookup.source_map != null and lookup.source_map.?.isExternal()) { + if (lookup.getSourceCode(top_source_url.slice())) |src| { + break :code src; + } + } + + if (top_frame_is_builtin) { + // Avoid printing "export default 'native'" + break :code ZigString.Slice.empty; + } + + var log = logger.Log.init(bun.default_allocator); + defer log.deinit(); + + var original_source = fetchWithoutOnLoadPlugins(this, this.global, top.source_url, bun.String.empty, &log, .print_source) catch return; + must_reset_parser_arena_later.* = true; + break :code original_source.source_code.toUTF8(bun.default_allocator); + }; + + if (enable_source_code_preview and code.len == 0) { + exception.collectSourceLines(error_instance, this.global); + } + + if (code.len > 0) + source_code_slice.* = code; + + top.position.line = Ordinal.fromZeroBased(mapping.original.lines); + top.position.column = Ordinal.fromZeroBased(mapping.original.columns); + + exception.remapped = true; + top.remapped = true; + + const last_line = @max(top.position.line.zeroBased(), 0); + if 
(strings.getLinesInText( + code.slice(), + @intCast(last_line), + ZigException.Holder.source_lines_count, + )) |lines_buf| { + var lines = lines_buf.slice(); + var source_lines = exception.stack.source_lines_ptr[0..ZigException.Holder.source_lines_count]; + var source_line_numbers = exception.stack.source_lines_numbers[0..ZigException.Holder.source_lines_count]; + @memset(source_lines, String.empty); + @memset(source_line_numbers, 0); + + lines = lines[0..@min(@as(usize, lines.len), source_lines.len)]; + var current_line_number: i32 = @intCast(last_line); + for (lines, source_lines[0..lines.len], source_line_numbers[0..lines.len]) |line, *line_dest, *line_number| { + // To minimize duplicate allocations, we use the same slice as above + // it should virtually always be UTF-8 and thus not cloned + line_dest.* = String.init(line); + line_number.* = current_line_number; + current_line_number -= 1; + } + + exception.stack.source_lines_len = @as(u8, @truncate(lines.len)); + } + } else if (enable_source_code_preview) { + exception.collectSourceLines(error_instance, this.global); + } + + if (frames.len > 1) { + for (frames) |*frame| { + if (frame == top or frame.position.isInvalid()) continue; + const source_url = frame.source_url.toUTF8(bun.default_allocator); + defer source_url.deinit(); + if (this.resolveSourceMapping( + source_url.slice(), + @max(frame.position.line.zeroBased(), 0), + @max(frame.position.column.zeroBased(), 0), + .no_source_contents, + )) |lookup| { + defer if (lookup.source_map) |map| map.deref(); + if (lookup.displaySourceURLIfNeeded(source_url.slice())) |src| { + frame.source_url.deref(); + frame.source_url = src; + } + const mapping = lookup.mapping; + frame.remapped = true; + frame.position.line = Ordinal.fromZeroBased(mapping.original.lines); + frame.position.column = Ordinal.fromZeroBased(mapping.original.columns); + } + } + } +} + +pub fn printExternallyRemappedZigException( + this: *VirtualMachine, + zig_exception: *ZigException, + formatter: ?*ConsoleObject.Formatter, + comptime Writer: type, + writer: Writer, + comptime allow_side_effects: bool, + comptime allow_ansi_color: bool, +) !void { + var default_formatter: ConsoleObject.Formatter = .{ .globalThis = this.global }; + defer default_formatter.deinit(); + try this.printErrorInstance( + .zig_exception, + zig_exception, + null, + formatter orelse &default_formatter, + Writer, + writer, + allow_ansi_color, + allow_side_effects, + ); +} + +fn printErrorInstance( + this: *VirtualMachine, + comptime mode: enum { js, zig_exception }, + error_instance: switch (mode) { + .js => JSValue, + .zig_exception => *ZigException, + }, + exception_list: ?*ExceptionList, + formatter: *ConsoleObject.Formatter, + comptime Writer: type, + writer: Writer, + comptime allow_ansi_color: bool, + comptime allow_side_effects: bool, +) !void { + var exception_holder = if (mode == .js) ZigException.Holder.init(); + var exception = if (mode == .js) exception_holder.zigException() else error_instance; + defer if (mode == .js) exception_holder.deinit(this); + defer if (mode == .js) error_instance.ensureStillAlive(); + + // The ZigException structure stores substrings of the source code, in + // which we need the lifetime of this data to outlive the inner call to + // remapZigException, but still get freed. 
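// A minimal sketch (not part of this patch) of the caller-owned optional slice pattern used
// just below: the callee may hand back an allocation whose lifetime must span the rest of the
// caller, and a single deferred deinit frees it on every path, including error paths.
// `Slice`, `maybeLoadSource`, and `printWithSource` are hypothetical names for illustration.
const std = @import("std");

const Slice = struct {
    bytes: []u8,
    allocator: std.mem.Allocator,
    fn deinit(self: Slice) void {
        self.allocator.free(self.bytes);
    }
};

fn maybeLoadSource(allocator: std.mem.Allocator, out: *?Slice) !void {
    // Populate `out` only when source text actually had to be materialized.
    out.* = Slice{ .bytes = try allocator.dupe(u8, "const answer = 42;"), .allocator = allocator };
}

fn printWithSource(allocator: std.mem.Allocator) !void {
    var source: ?Slice = null;
    defer if (source) |s| s.deinit(); // freed after all uses below
    try maybeLoadSource(allocator, &source);
    if (source) |s| std.debug.print("{s}\n", .{s.bytes});
}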
+ var source_code_slice: ?ZigString.Slice = null; + defer if (source_code_slice) |slice| slice.deinit(); + + if (mode == .js) { + this.remapZigException( + exception, + error_instance, + exception_list, + &exception_holder.need_to_clear_parser_arena_on_deinit, + &source_code_slice, + formatter.error_display_level != .warn, + ); + } + const prev_had_errors = this.had_errors; + this.had_errors = true; + defer this.had_errors = prev_had_errors; + + if (allow_side_effects) { + if (this.debugger) |*debugger| { + debugger.lifecycle_reporter_agent.reportError(exception); + } + } + + defer if (allow_side_effects and Output.is_github_action) + printGithubAnnotation(exception); + + // This is a longer number than necessary because we don't handle this case very well + // At the very least, we shouldn't dump 100 KB of minified code into your terminal. + const max_line_length_with_divot = 512; + const max_line_length = 1024; + + const line_numbers = exception.stack.source_lines_numbers[0..exception.stack.source_lines_len]; + var max_line: i32 = -1; + for (line_numbers) |line| max_line = @max(max_line, line); + const max_line_number_pad = std.fmt.count("{d}", .{max_line + 1}); + + var source_lines = exception.stack.sourceLineIterator(); + var last_pad: u64 = 0; + while (source_lines.untilLast()) |source| { + defer source.text.deinit(); + const display_line = source.line + 1; + + const int_size = std.fmt.count("{d}", .{display_line}); + const pad = max_line_number_pad - int_size; + last_pad = pad; + try writer.writeByteNTimes(' ', pad); + + const trimmed = std.mem.trimRight(u8, std.mem.trim(u8, source.text.slice(), "\n"), "\t "); + const clamped = trimmed[0..@min(trimmed.len, max_line_length)]; + + if (clamped.len != trimmed.len) { + const fmt = if (comptime allow_ansi_color) " | ... truncated \n" else "\n"; + try writer.print( + comptime Output.prettyFmt( + "{d} | {}" ++ fmt, + allow_ansi_color, + ), + .{ display_line, bun.fmt.fmtJavaScript(clamped, .{ .enable_colors = allow_ansi_color }) }, + ); + } else { + try writer.print( + comptime Output.prettyFmt( + "{d} | {}\n", + allow_ansi_color, + ), + .{ display_line, bun.fmt.fmtJavaScript(clamped, .{ .enable_colors = allow_ansi_color }) }, + ); + } + } + + const name = exception.name; + const message = exception.message; + + const is_error_instance = mode == .js and + (error_instance != .zero and error_instance.jsType() == .ErrorInstance); + const code: ?[]const u8 = if (is_error_instance) code: { + if (error_instance.uncheckedPtrCast(JSC.JSObject).getCodePropertyVMInquiry(this.global)) |code_value| { + if (code_value.isString()) { + const code_string = code_value.toBunString(this.global) catch { + // JSC::JSString to WTF::String can only fail on out of memory. 
+ bun.outOfMemory(); + }; + defer code_string.deref(); + + if (code_string.is8Bit()) { + // We can count on this memory being valid until the end + // of this function because + break :code code_string.latin1(); + } + } + } + break :code null; + } else null; + + var did_print_name = false; + if (source_lines.next()) |source| brk: { + if (source.text.len == 0) break :brk; + + var top_frame = if (exception.stack.frames_len > 0) &exception.stack.frames()[0] else null; + + if (this.hide_bun_stackframes) { + for (exception.stack.frames()) |*frame| { + if (frame.position.isInvalid() or frame.source_url.hasPrefixComptime("bun:") or frame.source_url.hasPrefixComptime("node:")) continue; + top_frame = frame; + break; + } + } + + if (top_frame == null or top_frame.?.position.isInvalid()) { + defer did_print_name = true; + defer source.text.deinit(); + const trimmed = std.mem.trimRight(u8, std.mem.trim(u8, source.text.slice(), "\n"), "\t "); + + const text = trimmed[0..@min(trimmed.len, max_line_length)]; + + if (text.len != trimmed.len) { + const fmt = if (comptime allow_ansi_color) " | ... truncated \n" else "\n"; + try writer.print( + comptime Output.prettyFmt( + "- | {}" ++ fmt, + allow_ansi_color, + ), + .{bun.fmt.fmtJavaScript(text, .{ .enable_colors = allow_ansi_color })}, + ); + } else { + try writer.print( + comptime Output.prettyFmt( + "- | {}\n", + allow_ansi_color, + ), + .{bun.fmt.fmtJavaScript(text, .{ .enable_colors = allow_ansi_color })}, + ); + } + + try this.printErrorNameAndMessage(name, message, !exception.browser_url.isEmpty(), code, Writer, writer, allow_ansi_color, formatter.error_display_level); + } else if (top_frame) |top| { + defer did_print_name = true; + const display_line = source.line + 1; + const int_size = std.fmt.count("{d}", .{display_line}); + const pad = max_line_number_pad - int_size; + try writer.writeByteNTimes(' ', pad); + defer source.text.deinit(); + const text = source.text.slice(); + const trimmed = std.mem.trimRight(u8, std.mem.trim(u8, text, "\n"), "\t "); + + // TODO: preserve the divot position and possibly use stringWidth() to figure out where to put the divot + const clamped = trimmed[0..@min(trimmed.len, max_line_length)]; + + if (clamped.len != trimmed.len) { + const fmt = if (comptime allow_ansi_color) " | ... 
truncated \n\n" else "\n\n"; + try writer.print( + comptime Output.prettyFmt( + "{d} | {}" ++ fmt, + allow_ansi_color, + ), + .{ display_line, bun.fmt.fmtJavaScript(clamped, .{ .enable_colors = allow_ansi_color }) }, + ); + } else { + try writer.print( + comptime Output.prettyFmt( + "{d} | {}\n", + allow_ansi_color, + ), + .{ display_line, bun.fmt.fmtJavaScript(clamped, .{ .enable_colors = allow_ansi_color }) }, + ); + + if (clamped.len < max_line_length_with_divot or top.position.column.zeroBased() > max_line_length_with_divot) { + const indent = max_line_number_pad + " | ".len + @as(u64, @intCast(top.position.column.zeroBased())); + + try writer.writeByteNTimes(' ', indent); + try writer.print(comptime Output.prettyFmt( + "^\n", + allow_ansi_color, + ), .{}); + } else { + try writer.writeAll("\n"); + } + } + + try this.printErrorNameAndMessage(name, message, !exception.browser_url.isEmpty(), code, Writer, writer, allow_ansi_color, formatter.error_display_level); + } + } + + if (!did_print_name) { + try this.printErrorNameAndMessage(name, message, !exception.browser_url.isEmpty(), code, Writer, writer, allow_ansi_color, formatter.error_display_level); + } + + // This is usually unsafe to do, but we are protecting them each time first + var errors_to_append = std.ArrayList(JSValue).init(this.allocator); + defer { + for (errors_to_append.items) |err| { + err.unprotect(); + } + errors_to_append.deinit(); + } + + if (is_error_instance) { + var saw_cause = false; + const Iterator = JSC.JSPropertyIterator(.{ + .include_value = true, + .skip_empty_name = true, + .own_properties_only = true, + .observable = false, + .only_non_index_properties = true, + }); + // SAFETY: error instances are always objects + const error_obj = error_instance.getObject().?; + var iterator = try Iterator.init(this.global, error_obj); + defer iterator.deinit(); + const longest_name = @min(iterator.getLongestPropertyName(), 10); + var is_first_property = true; + while (try iterator.next()) |field| { + const value = iterator.value; + if (field.eqlComptime("message") or field.eqlComptime("name") or field.eqlComptime("stack")) { + continue; + } + + // We special-case the code property. Let's avoid printing it twice. 
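+ // (`code` was captured above and is printed once, separately, after this property loop)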
+ if (field.eqlComptime("code") and code != null) { + continue; + } + + const kind = value.jsType(); + if (kind == .ErrorInstance and + // avoid infinite recursion + !prev_had_errors) + { + if (field.eqlComptime("cause")) { + saw_cause = true; + } + value.protect(); + try errors_to_append.append(value); + } else if (kind.isObject() or kind.isArray() or value.isPrimitive() or kind.isStringLike()) { + var bun_str = bun.String.empty; + defer bun_str.deref(); + const prev_disable_inspect_custom = formatter.disable_inspect_custom; + const prev_quote_strings = formatter.quote_strings; + const prev_max_depth = formatter.max_depth; + const prev_format_buffer_as_text = formatter.format_buffer_as_text; + formatter.depth += 1; + formatter.format_buffer_as_text = true; + defer { + formatter.depth -= 1; + formatter.max_depth = prev_max_depth; + formatter.quote_strings = prev_quote_strings; + formatter.disable_inspect_custom = prev_disable_inspect_custom; + formatter.format_buffer_as_text = prev_format_buffer_as_text; + } + formatter.max_depth = 1; + formatter.quote_strings = true; + formatter.disable_inspect_custom = true; + + const pad_left = longest_name -| field.length(); + is_first_property = false; + try writer.writeByteNTimes(' ', pad_left); + + try writer.print(comptime Output.prettyFmt(" {}: ", allow_ansi_color), .{field}); + + // When we're printing errors for a top-level uncaught exception / rejection, suppress further errors here. + if (allow_side_effects) { + if (this.global.hasException()) { + this.global.clearException(); + } + } + + formatter.format( + JSC.Formatter.Tag.getAdvanced( + value, + this.global, + .{ .disable_inspect_custom = true, .hide_global = true }, + ), + Writer, + writer, + value, + this.global, + allow_ansi_color, + ) catch {}; + + if (allow_side_effects) { + // When we're printing errors for a top-level uncaught exception / rejection, suppress further errors here. + if (this.global.hasException()) { + this.global.clearException(); + } + } else if (this.global.hasException() or formatter.failed) { + return; + } + + try writer.writeAll(comptime Output.prettyFmt(",\n", allow_ansi_color)); + } + } + + if (code) |code_str| { + const pad_left = longest_name -| "code".len; + is_first_property = false; + try writer.writeByteNTimes(' ', pad_left); + + try writer.print(comptime Output.prettyFmt(" code: {}\n", allow_ansi_color), .{ + bun.fmt.quote(code_str), + }); + } + + if (!is_first_property) { + try writer.writeAll("\n"); + } + + // "cause" is not enumerable, so the above loop won't see it. + if (!saw_cause) { + if (error_instance.getOwn(this.global, "cause")) |cause| { + if (cause.jsType() == .ErrorInstance) { + cause.protect(); + try errors_to_append.append(cause); + } + } + } + } else if (mode == .js and error_instance != .zero) { + // If you do reportError([1,2,3]) we should still show something at least.
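+ // Non-Error values are pretty-printed via the formatter; values whose tag is .NativeCode are skipped by the check below.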
+ const tag = JSC.Formatter.Tag.getAdvanced( + error_instance, + this.global, + .{ .disable_inspect_custom = true, .hide_global = true }, + ); + if (tag.tag != .NativeCode) { + try formatter.format( + tag, + Writer, + writer, + error_instance, + this.global, + allow_ansi_color, + ); + + // Always include a newline in this case + try writer.writeAll("\n"); + } + } + + try printStackTrace(@TypeOf(writer), writer, exception.stack, allow_ansi_color); + + if (!exception.browser_url.isEmpty()) { + try writer.print( + comptime Output.prettyFmt( + " from browser tab {}\n", + allow_ansi_color, + ), + .{exception.browser_url}, + ); + } + + for (errors_to_append.items) |err| { + try writer.writeAll("\n"); + try this.printErrorInstance(.js, err, exception_list, formatter, Writer, writer, allow_ansi_color, allow_side_effects); + } +} + +fn printErrorNameAndMessage( + _: *VirtualMachine, + name: String, + message: String, + is_browser_error: bool, + optional_code: ?[]const u8, + comptime Writer: type, + writer: Writer, + comptime allow_ansi_color: bool, + error_display_level: ConsoleObject.FormatOptions.ErrorDisplayLevel, +) !void { + if (is_browser_error) { + try writer.writeAll(Output.prettyFmt("frontend ", true)); + } + if (!name.isEmpty() and !message.isEmpty()) { + const display_name, const display_message = if (name.eqlComptime("Error")) brk: { + // If `err.code` is set, and `err.message` is of form `{code}: {text}`, + // use the code as the name since `error: ENOENT: no such ...` is + // not as nice looking when there are two error prefixes. + if (optional_code) |code| if (bun.strings.isAllASCII(code)) { + const has_prefix = switch (message.isUTF16()) { + inline else => |is_utf16| has_prefix: { + const msg_chars = if (is_utf16) message.utf16() else message.latin1(); + // + 1 to ensure the message is a non-empty string. + break :has_prefix msg_chars.len > code.len + ": ".len + 1 and + (if (is_utf16) + // there is no existing function to perform this slice comparison + // between []const u16 and []const u8 + for (code, msg_chars[0..code.len]) |a, b| { + if (a != b) break false; + } else true + else + bun.strings.eqlLong(msg_chars[0..code.len], code, false)) and + msg_chars[code.len] == ':' and + msg_chars[code.len + 1] == ' '; + }, + }; + if (has_prefix) break :brk .{ + String.init(code), + message.substring(code.len + ": ".len), + }; + }; + + break :brk .{ String.empty, message }; + } else .{ name, message }; + try writer.print(comptime Output.prettyFmt("{}{}\n", allow_ansi_color), .{ + error_display_level.formatter(display_name, allow_ansi_color, .include_colon), + display_message, + }); + } else if (!name.isEmpty()) { + try writer.print("{}\n", .{error_display_level.formatter(name, allow_ansi_color, .include_colon)}); + } else if (!message.isEmpty()) { + try writer.print(comptime Output.prettyFmt("{}{}\n", allow_ansi_color), .{ error_display_level.formatter(bun.String.empty, allow_ansi_color, .include_colon), message }); + } else { + try writer.print(comptime Output.prettyFmt("{}\n", allow_ansi_color), .{error_display_level.formatter(bun.String.empty, allow_ansi_color, .exclude_colon)}); + } +} + +// In GitHub Actions, emit an annotation that renders the error and location.
+// https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message +pub noinline fn printGithubAnnotation(exception: *ZigException) void { + @branchHint(.cold); + const name = exception.name; + const message = exception.message; + const frames = exception.stack.frames(); + const top_frame = if (frames.len > 0) frames[0] else null; + const dir = bun.getenvZ("GITHUB_WORKSPACE") orelse bun.fs.FileSystem.instance.top_level_dir; + const allocator = bun.default_allocator; + Output.flush(); + + var buffered_writer = std.io.bufferedWriter(Output.errorWriter()); + var writer = buffered_writer.writer(); + defer { + buffered_writer.flush() catch {}; + } + + var has_location = false; + + if (top_frame) |frame| { + if (!frame.position.isInvalid()) { + const source_url = frame.source_url.toUTF8(allocator); + defer source_url.deinit(); + const file = bun.path.relative(dir, source_url.slice()); + writer.print("\n::error file={s},line={d},col={d},title=", .{ + file, + frame.position.line.oneBased(), + frame.position.column.oneBased(), + }) catch {}; + has_location = true; + } + } + + if (!has_location) { + writer.print("\n::error title=", .{}) catch {}; + } + + if (name.isEmpty() or name.eqlComptime("Error")) { + writer.print("error", .{}) catch {}; + } else { + writer.print("{s}", .{name.githubAction()}) catch {}; + } + + if (!message.isEmpty()) { + const message_slice = message.toUTF8(allocator); + defer message_slice.deinit(); + const msg = message_slice.slice(); + + var cursor: u32 = 0; + while (strings.indexOfNewlineOrNonASCIIOrANSI(msg, cursor)) |i| { + cursor = i + 1; + if (msg[i] == '\n') { + const first_line = bun.String.fromUTF8(msg[0..i]); + writer.print(": {s}::", .{first_line.githubAction()}) catch {}; + break; + } + } else { + writer.print(": {s}::", .{message.githubAction()}) catch {}; + } + + while (strings.indexOfNewlineOrNonASCIIOrANSI(msg, cursor)) |i| { + cursor = i + 1; + if (msg[i] == '\n') { + break; + } + } + + if (cursor > 0) { + const body = ZigString.initUTF8(msg[cursor..]); + writer.print("{s}", .{body.githubAction()}) catch {}; + } + } else { + writer.print("::", .{}) catch {}; + } + + // TODO: cleanup and refactor to use printStackTrace() + if (top_frame) |_| { + const vm = VirtualMachine.get(); + const origin = if (vm.is_from_devserver) &vm.origin else null; + + var i: i16 = 0; + while (i < frames.len) : (i += 1) { + const frame = frames[@as(usize, @intCast(i))]; + const source_url = frame.source_url.toUTF8(allocator); + defer source_url.deinit(); + const file = bun.path.relative(dir, source_url.slice()); + const func = frame.function_name.toUTF8(allocator); + + if (file.len == 0 and func.len == 0) continue; + + const has_name = std.fmt.count("{any}", .{frame.nameFormatter( + false, + )}) > 0; + + // %0A = escaped newline + if (has_name) { + writer.print( + "%0A at {any} ({any})", + .{ + frame.nameFormatter(false), + frame.sourceURLFormatter( + file, + origin, + false, + false, + ), + }, + ) catch {}; + } else { + writer.print( + "%0A at {any}", + .{ + frame.sourceURLFormatter( + file, + origin, + false, + false, + ), + }, + ) catch {}; + } + } + } + + writer.print("\n", .{}) catch {}; +} + +pub fn resolveSourceMapping( + this: *VirtualMachine, + path: []const u8, + line: i32, + column: i32, + source_handling: SourceMap.SourceContentHandling, +) ?SourceMap.Mapping.Lookup { + return this.source_mappings.resolveMapping(path, line, column, source_handling) orelse { + if (this.standalone_module_graph) |graph| { + const file = 
graph.find(path) orelse return null; + const map = file.sourcemap.load() orelse return null; + + map.ref(); + + this.source_mappings.putValue(path, SavedSourceMap.Value.init(map)) catch + bun.outOfMemory(); + + const mapping = SourceMap.Mapping.find(map.mappings, line, column) orelse + return null; + + return .{ + .mapping = mapping, + .source_map = map, + .prefetched_source_code = null, + }; + } + + return null; + }; +} + +extern fn Process__emitMessageEvent(global: *JSGlobalObject, value: JSValue) void; +extern fn Process__emitDisconnectEvent(global: *JSGlobalObject) void; +pub extern fn Process__emitErrorEvent(global: *JSGlobalObject, value: JSValue) void; + +pub const IPCInstanceUnion = union(enum) { + /// IPC is put in this "enabled but not started" state when IPC is detected + /// but the client JavaScript has not yet done `.on("message")` + waiting: struct { + // TODO: rename to `fd` + info: bun.FD, + mode: IPC.Mode, + }, + initialized: *IPCInstance, +}; + +pub const IPCInstance = struct { + pub const new = bun.TrivialNew(@This()); + pub const deinit = bun.TrivialDeinit(@This()); + + globalThis: ?*JSGlobalObject, + context: if (Environment.isPosix) *uws.SocketContext else void, + data: IPC.IPCData, + has_disconnect_called: bool = false, + + const node_cluster_binding = @import("./node/node_cluster_binding.zig"); + + pub fn ipc(this: *IPCInstance) ?*IPC.IPCData { + return &this.data; + } + pub fn getGlobalThis(this: *IPCInstance) ?*JSGlobalObject { + return this.globalThis; + } + + pub fn handleIPCMessage(this: *IPCInstance, message: IPC.DecodedIPCMessage) void { + JSC.markBinding(@src()); + const globalThis = this.globalThis orelse return; + const event_loop = JSC.VirtualMachine.get().eventLoop(); + + switch (message) { + // In future versions we can read this in order to detect version mismatches, + // or disable future optimizations if the subprocess is old. + .version => |v| { + IPC.log("Parent IPC version is {d}", .{v}); + }, + .data => |data| { + IPC.log("Received IPC message from parent", .{}); + event_loop.enter(); + defer event_loop.exit(); + Process__emitMessageEvent(globalThis, data); + }, + .internal => |data| { + IPC.log("Received IPC internal message from parent", .{}); + event_loop.enter(); + defer event_loop.exit(); + node_cluster_binding.handleInternalMessageChild(globalThis, data) catch return; + }, + } + } + + pub fn handleIPCClose(this: *IPCInstance) void { + IPC.log("IPCInstance#handleIPCClose", .{}); + var vm = VirtualMachine.get(); + vm.ipc = null; + const event_loop = vm.eventLoop(); + node_cluster_binding.child_singleton.deinit(); + event_loop.enter(); + Process__emitDisconnectEvent(vm.global); + event_loop.exit(); + if (Environment.isPosix) { + uws.us_socket_context_free(0, this.context); + } + vm.channel_ref.disable(); + this.deinit(); + } + + export fn Bun__closeChildIPC(global: *JSGlobalObject) void { + if (global.bunVM().getIPCInstance()) |current_ipc| { + current_ipc.data.close(true); + } + } + + pub const Handlers = IPC.NewIPCHandler(IPCInstance); +}; + +pub fn initIPCInstance(this: *VirtualMachine, info: bun.FD, mode: IPC.Mode) void { + IPC.log("initIPCInstance {}", .{info}); + this.ipc = .{ .waiting = .{ .info = info, .mode = mode } }; +} + +pub fn getIPCInstance(this: *VirtualMachine) ?*IPCInstance { + if (this.ipc == null) return null; + if (this.ipc.? 
!= .waiting) return this.ipc.?.initialized; + const opts = this.ipc.?.waiting; + + IPC.log("getIPCInstance {}", .{opts.info}); + + this.event_loop.ensureWaker(); + + const instance = switch (Environment.os) { + else => instance: { + const context = uws.us_create_socket_context(0, this.event_loop_handle.?, @sizeOf(usize), .{}).?; + IPC.Socket.configure(context, true, *IPCInstance, IPCInstance.Handlers); + + var instance = IPCInstance.new(.{ + .globalThis = this.global, + .context = context, + .data = undefined, + }); + + this.ipc = .{ .initialized = instance }; + + const socket = IPC.Socket.fromFd(context, opts.info, IPCInstance, instance, null) orelse { + instance.deinit(); + this.ipc = null; + Output.warn("Unable to start IPC socket", .{}); + return null; + }; + socket.setTimeout(0); + + instance.data = .{ .socket = socket, .mode = opts.mode }; + + break :instance instance; + }, + .windows => instance: { + var instance = IPCInstance.new(.{ + .globalThis = this.global, + .context = {}, + .data = .{ .mode = opts.mode }, + }); + + this.ipc = .{ .initialized = instance }; + + instance.data.configureClient(IPCInstance, instance, opts.info) catch { + instance.deinit(); + this.ipc = null; + Output.warn("Unable to start IPC pipe '{}'", .{opts.info}); + return null; + }; + + break :instance instance; + }, + }; + + instance.data.writeVersionPacket(this.global); + + return instance; +} + +/// To satisfy the interface from NewHotReloader() +pub fn getLoaders(vm: *VirtualMachine) *bun.options.Loader.HashTable { + return &vm.transpiler.options.loaders; +} + +/// To satisfy the interface from NewHotReloader() +pub fn bustDirCache(vm: *VirtualMachine, path: []const u8) bool { + return vm.transpiler.resolver.bustDirCache(path); +} + +pub const ExitHandler = struct { + exit_code: u8 = 0, + + pub export fn Bun__getExitCode(vm: *VirtualMachine) u8 { + return vm.exit_handler.exit_code; + } + + pub export fn Bun__setExitCode(vm: *VirtualMachine, code: u8) void { + vm.exit_handler.exit_code = code; + } + + extern fn Process__dispatchOnBeforeExit(*JSGlobalObject, code: u8) void; + extern fn Process__dispatchOnExit(*JSGlobalObject, code: u8) void; + extern fn Bun__closeAllSQLiteDatabasesForTermination() void; + + pub fn dispatchOnExit(this: *ExitHandler) void { + JSC.markBinding(@src()); + const vm: *VirtualMachine = @alignCast(@fieldParentPtr("exit_handler", this)); + Process__dispatchOnExit(vm.global, this.exit_code); + if (vm.isMainThread()) { + Bun__closeAllSQLiteDatabasesForTermination(); + } + } + + pub fn dispatchOnBeforeExit(this: *ExitHandler) void { + JSC.markBinding(@src()); + const vm: *VirtualMachine = @alignCast(@fieldParentPtr("exit_handler", this)); + Process__dispatchOnBeforeExit(vm.global, this.exit_code); + } +}; + +const std = @import("std"); +const bun = @import("bun"); +const Environment = bun.Environment; +const JSC = bun.jsc; +const JSGlobalObject = JSC.JSGlobalObject; +const Async = bun.Async; +const Transpiler = bun.Transpiler; +const ImportWatcher = JSC.hot_reloader.ImportWatcher; +const MutableString = bun.MutableString; +const stringZ = bun.stringZ; +const default_allocator = bun.default_allocator; +const StoredFileDescriptorType = bun.StoredFileDescriptorType; +const ErrorableString = JSC.ErrorableString; +const Arena = @import("../allocators/mimalloc_arena.zig").Arena; +const Exception = JSC.Exception; +const Allocator = std.mem.Allocator; +const IdentityContext = @import("../identity_context.zig").IdentityContext; +const Fs = @import("../fs.zig"); +const Resolver = 
@import("../resolver/resolver.zig"); +const ast = @import("../import_record.zig"); +const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint; +const ParseResult = bun.transpiler.ParseResult; +const logger = bun.logger; +const Api = @import("../api/schema.zig").Api; +const JSPrivateDataPtr = JSC.JSPrivateDataPtr; +const ConsoleObject = JSC.ConsoleObject; +const Node = JSC.Node; +const ZigException = JSC.ZigException; +const ZigStackTrace = JSC.ZigStackTrace; +const ErrorableResolvedSource = JSC.ErrorableResolvedSource; +const ResolvedSource = JSC.ResolvedSource; +const JSInternalPromise = JSC.JSInternalPromise; +const JSModuleLoader = JSC.JSModuleLoader; +const JSPromiseRejectionOperation = JSC.JSPromiseRejectionOperation; +const ErrorableZigString = JSC.ErrorableZigString; +const VM = JSC.VM; +const JSFunction = JSC.JSFunction; +const Config = @import("./config.zig"); +const URL = @import("../url.zig").URL; +const Bun = JSC.API.Bun; +const EventLoop = JSC.EventLoop; +const PendingResolution = @import("../resolver/resolver.zig").PendingResolution; +const ThreadSafeFunction = bun.api.napi.ThreadSafeFunction; +const PackageManager = @import("../install/install.zig").PackageManager; +const IPC = @import("ipc.zig"); +const DNSResolver = @import("api/bun/dns_resolver.zig").DNSResolver; +const Watcher = bun.Watcher; +const node_module_module = @import("./bindings/NodeModuleModule.zig"); +const ServerEntryPoint = bun.transpiler.EntryPoints.ServerEntryPoint; +const JSValue = JSC.JSValue; +const PluginRunner = bun.transpiler.PluginRunner; +const SavedSourceMap = JSC.SavedSourceMap; +const ModuleLoader = JSC.ModuleLoader; +const uws = bun.uws; +const Output = bun.Output; +const strings = bun.strings; +const SourceMap = bun.sourcemap; +const ZigString = JSC.ZigString; +const String = bun.String; +const Ordinal = bun.Ordinal; +const string = []const u8; +const FetchFlags = ModuleLoader.FetchFlags; +const Runtime = @import("../runtime.zig"); +const js_ast = bun.JSAst; +const js_printer = bun.js_printer; +const node_fallbacks = ModuleLoader.node_fallbacks; +const options = bun.options; +const webcore = bun.webcore; +const Global = bun.Global; +const DotEnv = bun.DotEnv; +const HotReloader = JSC.hot_reloader.HotReloader; +const Body = webcore.Body; diff --git a/src/bun.js/api.zig b/src/bun.js/api.zig new file mode 100644 index 0000000000..9ca9b4f52d --- /dev/null +++ b/src/bun.js/api.zig @@ -0,0 +1,48 @@ +//! 
"api" in this context means "the Bun APIs", as in "the exposed JS APIs" + +/// `globalThis.Bun` +pub const Bun = @import("api/BunObject.zig"); + +pub const Subprocess = @import("api/bun/subprocess.zig"); +pub const HashObject = @import("api/HashObject.zig"); +pub const UnsafeObject = @import("api/UnsafeObject.zig"); +pub const TOMLObject = @import("api/TOMLObject.zig"); +pub const Timer = @import("api/Timer.zig"); +pub const FFIObject = @import("api/FFIObject.zig"); +pub const AnyRequestContext = @import("api/server.zig").AnyRequestContext; +pub const AnyServer = @import("api/server.zig").AnyServer; +pub const BuildArtifact = @import("api/JSBundler.zig").BuildArtifact; +pub const BuildMessage = @import("BuildMessage.zig").BuildMessage; +pub const DNS = @import("api/bun/dns_resolver.zig"); +pub const DebugHTTPSServer = @import("api/server.zig").DebugHTTPSServer; +pub const DebugHTTPServer = @import("api/server.zig").DebugHTTPServer; +pub const FFI = @import("api/ffi.zig").FFI; +pub const HTMLRewriter = @import("api/html_rewriter.zig"); +pub const FileSystemRouter = @import("api/filesystem_router.zig").FileSystemRouter; +pub const Glob = @import("api/glob.zig"); +pub const H2FrameParser = @import("api/bun/h2_frame_parser.zig").H2FrameParser; +pub const HTMLBundle = @import("api/server/HTMLBundle.zig"); +pub const HTTPSServer = @import("api/server.zig").HTTPSServer; +pub const HTTPServer = @import("api/server.zig").HTTPServer; +pub const JSBundler = @import("api/JSBundler.zig").JSBundler; +pub const JSTranspiler = @import("api/JSTranspiler.zig"); +pub const Listener = @import("api/bun/socket.zig").Listener; +pub const MatchedRoute = @import("api/filesystem_router.zig").MatchedRoute; +pub const NativeBrotli = @import("node/node_zlib_binding.zig").SNativeBrotli; +pub const NativeZlib = @import("node/node_zlib_binding.zig").SNativeZlib; +pub const NodeHTTPResponse = @import("api/server.zig").NodeHTTPResponse; +pub const Postgres = @import("../sql/postgres.zig"); +pub const ResolveMessage = @import("ResolveMessage.zig").ResolveMessage; +pub const SavedRequest = @import("api/server.zig").SavedRequest; +pub const ServerConfig = @import("api/server.zig").ServerConfig; +pub const ServerWebSocket = @import("api/server.zig").ServerWebSocket; +pub const Shell = @import("../shell/shell.zig"); +pub const SocketAddress = @import("api/bun/socket.zig").SocketAddress; +pub const TCPSocket = @import("api/bun/socket.zig").TCPSocket; +pub const TLSSocket = @import("api/bun/socket.zig").TLSSocket; +pub const UDPSocket = @import("api/bun/udp_socket.zig").UDPSocket; +pub const Valkey = @import("../valkey/js_valkey.zig").JSValkeyClient; + +pub const napi = @import("../napi/napi.zig"); + +pub const node = @import("node.zig"); diff --git a/src/bun.js/api/BunObject.zig b/src/bun.js/api/BunObject.zig index dd13b27c32..b31f01e1bf 100644 --- a/src/bun.js/api/BunObject.zig +++ b/src/bun.js/api/BunObject.zig @@ -11,7 +11,7 @@ pub const BunObject = struct { pub const allocUnsafe = toJSCallback(Bun.allocUnsafe); pub const build = toJSCallback(Bun.JSBundler.buildFn); pub const color = toJSCallback(bun.css.CssColor.jsFunctionColor); - pub const connect = toJSCallback(JSC.wrapStaticMethod(JSC.API.Listener, "connect", false)); + pub const connect = toJSCallback(host_fn.wrapStaticMethod(api.Listener, "connect", false)); pub const createParsedShellScript = toJSCallback(bun.shell.ParsedShellScript.createParsedShellScript); pub const createShellInterpreter = toJSCallback(bun.shell.Interpreter.createShellInterpreter); pub const 
deflateSync = toJSCallback(JSZlib.deflateSync); @@ -21,7 +21,7 @@ pub const BunObject = struct { pub const indexOfLine = toJSCallback(Bun.indexOfLine); pub const inflateSync = toJSCallback(JSZlib.inflateSync); pub const jest = toJSCallback(@import("../test/jest.zig").Jest.call); - pub const listen = toJSCallback(JSC.wrapStaticMethod(JSC.API.Listener, "listen", false)); + pub const listen = toJSCallback(host_fn.wrapStaticMethod(api.Listener, "listen", false)); pub const mmap = toJSCallback(Bun.mmapFile); pub const nanoseconds = toJSCallback(Bun.nanoseconds); pub const openInEditor = toJSCallback(Bun.openInEditor); @@ -29,13 +29,13 @@ pub const BunObject = struct { pub const resolve = toJSCallback(Bun.resolve); pub const resolveSync = toJSCallback(Bun.resolveSync); pub const serve = toJSCallback(Bun.serve); - pub const sha = toJSCallback(JSC.wrapStaticMethod(Crypto.SHA512_256, "hash_", true)); + pub const sha = toJSCallback(host_fn.wrapStaticMethod(Crypto.SHA512_256, "hash_", true)); pub const shellEscape = toJSCallback(Bun.shellEscape); pub const shrink = toJSCallback(Bun.shrink); pub const sleepSync = toJSCallback(Bun.sleepSync); - pub const spawn = toJSCallback(JSC.wrapStaticMethod(JSC.Subprocess, "spawn", false)); - pub const spawnSync = toJSCallback(JSC.wrapStaticMethod(JSC.Subprocess, "spawnSync", false)); - pub const udpSocket = toJSCallback(JSC.wrapStaticMethod(JSC.API.UDPSocket, "udpSocket", false)); + pub const spawn = toJSCallback(host_fn.wrapStaticMethod(api.Subprocess, "spawn", false)); + pub const spawnSync = toJSCallback(host_fn.wrapStaticMethod(api.Subprocess, "spawnSync", false)); + pub const udpSocket = toJSCallback(host_fn.wrapStaticMethod(api.UDPSocket, "udpSocket", false)); pub const which = toJSCallback(Bun.which); pub const write = toJSCallback(JSC.WebCore.Blob.writeFile); @@ -84,7 +84,7 @@ pub const BunObject = struct { return "BunObject_callback_" ++ baseName; } - const toJSCallback = JSC.toJSHostFunction; + const toJSCallback = JSC.toJSHostFn; const LazyPropertyCallback = fn (*JSC.JSGlobalObject, *JSC.JSObject) callconv(JSC.conv) JSValue; @@ -266,7 +266,7 @@ pub fn which(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSE const arguments_ = callframe.arguments_old(2); const path_buf = bun.PathBufferPool.get(); defer bun.PathBufferPool.put(path_buf); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); defer arguments.deinit(); const path_arg = arguments.nextEat() orelse { return globalThis.throw("which: expected 1 argument, got 0", .{}); @@ -366,7 +366,7 @@ pub fn inspectTable(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) } // very stable memory address - var array = MutableString.init(getAllocator(globalThis), 0) catch bun.outOfMemory(); + var array = MutableString.init(bun.default_allocator, 0) catch bun.outOfMemory(); defer array.deinit(); var buffered_writer_ = MutableString.BufferedWriter{ .context = &array }; var buffered_writer = &buffered_writer_; @@ -424,7 +424,7 @@ pub fn inspect(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.J } // very stable memory address - var array = MutableString.init(getAllocator(globalThis), 0) catch unreachable; + var array = MutableString.init(bun.default_allocator, 0) catch unreachable; defer array.deinit(); var buffered_writer_ = MutableString.BufferedWriter{ .context = &array }; var buffered_writer = &buffered_writer_; @@ -456,7 +456,7 @@ pub fn 
inspect(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.J export fn Bun__inspect(globalThis: *JSGlobalObject, value: JSValue) bun.String { // very stable memory address - var array = MutableString.init(getAllocator(globalThis), 0) catch unreachable; + var array = MutableString.init(bun.default_allocator, 0) catch unreachable; defer array.deinit(); var buffered_writer = MutableString.BufferedWriter{ .context = &array }; const writer = buffered_writer.writer(); @@ -469,7 +469,7 @@ export fn Bun__inspect(globalThis: *JSGlobalObject, value: JSValue) bun.String { } export fn Bun__inspect_singleline(globalThis: *JSGlobalObject, value: JSValue) bun.String { - var array = MutableString.init(getAllocator(globalThis), 0) catch unreachable; + var array = MutableString.init(bun.default_allocator, 0) catch unreachable; defer array.deinit(); var buffered_writer = MutableString.BufferedWriter{ .context = &array }; const writer = buffered_writer.writer(); @@ -620,7 +620,7 @@ pub fn getMain(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue { } pub fn getArgv(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue { - return JSC.Node.Process.getArgv(globalThis); + return node.process.getArgv(globalThis); } const Editor = @import("../../open.zig").Editor; @@ -628,7 +628,7 @@ const Editor = @import("../../open.zig").Editor; pub fn openInEditor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { var edit = &VirtualMachine.get().rareData().editor_context; const args = callframe.arguments_old(4); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), args.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), args.slice()); defer arguments.deinit(); var path: string = ""; var editor_choice: ?Editor = null; @@ -777,7 +777,7 @@ pub fn shrink(globalObject: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError! } fn doResolve(globalThis: *JSC.JSGlobalObject, arguments: []const JSValue) bun.JSError!JSC.JSValue { - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const specifier = args.protectEatNext() orelse { return globalThis.throwInvalidArguments("Expected a specifier and a from path", .{}); @@ -840,7 +840,7 @@ fn doResolveWithArgs(ctx: *JSC.JSGlobalObject, specifier: bun.String, from: bun. ); if (!errorable.success) { - return ctx.throwValue(bun.cast(JSC.JSValueRef, errorable.result.err.ptr.?).?.value()); + return ctx.throwValue(bun.cast(JSC.C.JSValueRef, errorable.result.err.ptr.?).?.value()); } if (query_string.len > 0) { @@ -1001,7 +1001,7 @@ pub fn nanoseconds(globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSErr pub fn serve(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.arguments_old(2).slice(); var config: JSC.API.ServerConfig = brk: { - var args = JSC.Node.ArgumentsSlice.init(globalObject.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalObject.bunVM(), arguments); var config: JSC.API.ServerConfig = .{}; try JSC.API.ServerConfig.fromJS( @@ -1175,7 +1175,7 @@ pub fn mmapFile(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun. 
} const arguments_ = callframe.arguments_old(2); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); defer args.deinit(); var buf: bun.PathBuffer = undefined; @@ -1325,15 +1325,6 @@ pub fn getUnsafe(globalThis: *JSC.JSGlobalObject, _: *JSC.JSObject) JSC.JSValue return UnsafeObject.create(globalThis); } -pub const HashObject = @import("./HashObject.zig"); -pub const UnsafeObject = @import("./UnsafeObject.zig"); -pub const TOMLObject = @import("./TOMLObject.zig"); - -const Debugger = JSC.Debugger; - -pub const Timer = @import("./Timer.zig"); -pub const FFIObject = @import("./FFIObject.zig"); - pub fn stringWidth(str: bun.String, opts: gen.StringWidthOptions) usize { if (str.length() == 0) return 0; @@ -1713,35 +1704,33 @@ pub const JSZlib = struct { } }; -pub const Subprocess = @import("./bun/subprocess.zig"); +// const InternalTestingAPIs = struct { +// pub fn BunInternalFunction__syntaxHighlighter(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { +// const args = callframe.arguments_old(1); +// if (args.len < 1) { +// globalThis.throwNotEnoughArguments("code", 1, 0); +// } -const InternalTestingAPIs = struct { - pub fn BunInternalFunction__syntaxHighlighter(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const args = callframe.arguments_old(1); - if (args.len < 1) { - globalThis.throwNotEnoughArguments("code", 1, 0); - } +// const code = args.ptr[0].toSliceOrNull(globalThis) orelse return .zero; +// defer code.deinit(); +// var buffer = MutableString.initEmpty(bun.default_allocator); +// defer buffer.deinit(); +// var writer = buffer.bufferedWriter(); +// const formatter = bun.fmt.fmtJavaScript(code.slice(), .{ +// .enable_colors = true, +// .check_for_unhighlighted_write = false, +// }); +// std.fmt.format(writer.writer(), "{}", .{formatter}) catch |err| { +// return globalThis.throwError(err, "Error formatting code"); +// }; - const code = args.ptr[0].toSliceOrNull(globalThis) orelse return .zero; - defer code.deinit(); - var buffer = MutableString.initEmpty(bun.default_allocator); - defer buffer.deinit(); - var writer = buffer.bufferedWriter(); - const formatter = bun.fmt.fmtJavaScript(code.slice(), .{ - .enable_colors = true, - .check_for_unhighlighted_write = false, - }); - std.fmt.format(writer.writer(), "{}", .{formatter}) catch |err| { - return globalThis.throwError(err, "Error formatting code"); - }; +// writer.flush() catch |err| { +// return globalThis.throwError(err, "Error formatting code"); +// }; - writer.flush() catch |err| { - return globalThis.throwError(err, "Error formatting code"); - }; - - return bun.String.createUTF8ForJS(globalThis, buffer.list.items); - } -}; +// return bun.String.createUTF8ForJS(globalThis, buffer.list.items); +// } +// }; comptime { _ = Crypto.JSPasswordObject.JSPasswordObject__create; @@ -1773,7 +1762,6 @@ const MacroEntryPoint = bun.transpiler.MacroEntryPoint; const logger = bun.logger; const Api = @import("../../api/schema.zig").Api; const options = @import("../../options.zig"); -const ServerEntryPoint = bun.transpiler.ServerEntryPoint; const js_printer = bun.js_printer; const js_parser = bun.js_parser; const js_ast = bun.JSAst; @@ -1794,16 +1782,14 @@ const Response = WebCore.Response; const Headers = WebCore.Headers; const Fetch = WebCore.Fetch; const JSC = bun.JSC; -const JSError = @import("../base.zig").JSError; - -const MarkedArrayBuffer = 
@import("../base.zig").MarkedArrayBuffer; -const getAllocator = @import("../base.zig").getAllocator; const JSValue = bun.JSC.JSValue; const JSGlobalObject = bun.JSC.JSGlobalObject; const JSPrivateDataPtr = bun.JSC.JSPrivateDataPtr; const ConsoleObject = bun.JSC.ConsoleObject; -const Node = bun.JSC.Node; +const api = bun.api; +const node = bun.api.node; +const host_fn = bun.jsc.host_fn; const ZigException = bun.JSC.ZigException; const ZigStackTrace = bun.JSC.ZigStackTrace; const ErrorableResolvedSource = bun.JSC.ErrorableResolvedSource; @@ -1829,3 +1815,10 @@ const Async = bun.Async; const SemverObject = bun.Semver.SemverObject; const Braces = @import("../../shell/braces.zig"); const Shell = @import("../../shell/shell.zig"); + +const Debugger = JSC.Debugger; +const HashObject = bun.api.HashObject; +const UnsafeObject = bun.api.UnsafeObject; +const TOMLObject = bun.api.TOMLObject; +const Timer = bun.api.Timer; +const FFIObject = bun.api.FFIObject; diff --git a/src/bun.js/api/FFIObject.zig b/src/bun.js/api/FFIObject.zig index 04755067b3..54e2e1aa32 100644 --- a/src/bun.js/api/FFIObject.zig +++ b/src/bun.js/api/FFIObject.zig @@ -9,7 +9,7 @@ pub fn newCString(globalThis: *JSGlobalObject, value: JSValue, byteOffset: ?JSVa } } -pub const dom_call = JSC.DOMCall("FFI", @This(), "ptr", JSC.DOMEffect.forRead(.TypedArrayProperties)); +pub const dom_call = DOMCall("FFI", @This(), "ptr", DOMEffect.forRead(.TypedArrayProperties)); pub fn toJS(globalObject: *JSC.JSGlobalObject) JSC.JSValue { const object = JSC.JSValue.createEmptyObject(globalObject, comptime std.meta.fieldNames(@TypeOf(fields)).len + 2); @@ -28,26 +28,26 @@ pub fn toJS(globalObject: *JSC.JSGlobalObject) JSC.JSValue { } pub const Reader = struct { - pub const DOMCalls = .{ - .u8 = JSC.DOMCall("Reader", @This(), "u8", JSC.DOMEffect.forRead(.World)), - .u16 = JSC.DOMCall("Reader", @This(), "u16", JSC.DOMEffect.forRead(.World)), - .u32 = JSC.DOMCall("Reader", @This(), "u32", JSC.DOMEffect.forRead(.World)), - .ptr = JSC.DOMCall("Reader", @This(), "ptr", JSC.DOMEffect.forRead(.World)), - .i8 = JSC.DOMCall("Reader", @This(), "i8", JSC.DOMEffect.forRead(.World)), - .i16 = JSC.DOMCall("Reader", @This(), "i16", JSC.DOMEffect.forRead(.World)), - .i32 = JSC.DOMCall("Reader", @This(), "i32", JSC.DOMEffect.forRead(.World)), - .i64 = JSC.DOMCall("Reader", @This(), "i64", JSC.DOMEffect.forRead(.World)), - .u64 = JSC.DOMCall("Reader", @This(), "u64", JSC.DOMEffect.forRead(.World)), - .intptr = JSC.DOMCall("Reader", @This(), "intptr", JSC.DOMEffect.forRead(.World)), - .f32 = JSC.DOMCall("Reader", @This(), "f32", JSC.DOMEffect.forRead(.World)), - .f64 = JSC.DOMCall("Reader", @This(), "f64", JSC.DOMEffect.forRead(.World)), + pub const dom_calls = .{ + .u8 = DOMCall("Reader", @This(), "u8", DOMEffect.forRead(.World)), + .u16 = DOMCall("Reader", @This(), "u16", DOMEffect.forRead(.World)), + .u32 = DOMCall("Reader", @This(), "u32", DOMEffect.forRead(.World)), + .ptr = DOMCall("Reader", @This(), "ptr", DOMEffect.forRead(.World)), + .i8 = DOMCall("Reader", @This(), "i8", DOMEffect.forRead(.World)), + .i16 = DOMCall("Reader", @This(), "i16", DOMEffect.forRead(.World)), + .i32 = DOMCall("Reader", @This(), "i32", DOMEffect.forRead(.World)), + .i64 = DOMCall("Reader", @This(), "i64", DOMEffect.forRead(.World)), + .u64 = DOMCall("Reader", @This(), "u64", DOMEffect.forRead(.World)), + .intptr = DOMCall("Reader", @This(), "intptr", DOMEffect.forRead(.World)), + .f32 = DOMCall("Reader", @This(), "f32", DOMEffect.forRead(.World)), + .f64 = DOMCall("Reader", @This(), 
"f64", DOMEffect.forRead(.World)), }; pub fn toJS(globalThis: *JSC.JSGlobalObject) JSC.JSValue { - const obj = JSC.JSValue.createEmptyObject(globalThis, std.meta.fieldNames(@TypeOf(Reader.DOMCalls)).len); + const obj = JSC.JSValue.createEmptyObject(globalThis, std.meta.fieldNames(@TypeOf(Reader.dom_calls)).len); - inline for (comptime std.meta.fieldNames(@TypeOf(Reader.DOMCalls))) |field| { - @field(Reader.DOMCalls, field).put(globalThis, obj); + inline for (comptime std.meta.fieldNames(@TypeOf(Reader.dom_calls))) |field| { + @field(Reader.dom_calls, field).put(globalThis, obj); } return obj; @@ -358,11 +358,11 @@ fn ptr_( } const array_buffer = value.asArrayBuffer(globalThis) orelse { - return JSC.toInvalidArguments("Expected ArrayBufferView but received {s}", .{@tagName(value.jsType())}, globalThis); + return globalThis.toInvalidArguments("Expected ArrayBufferView but received {s}", .{@tagName(value.jsType())}); }; if (array_buffer.len == 0) { - return JSC.toInvalidArguments("ArrayBufferView must have a length > 0. A pointer to empty memory doesn't work", .{}, globalThis); + return globalThis.toInvalidArguments("ArrayBufferView must have a length > 0. A pointer to empty memory doesn't work", .{}); } var addr: usize = @intFromPtr(array_buffer.ptr); @@ -372,7 +372,7 @@ fn ptr_( if (byteOffset) |off| { if (!off.isEmptyOrUndefinedOrNull()) { if (!off.isNumber()) { - return JSC.toInvalidArguments("Expected number for byteOffset", .{}, globalThis); + return globalThis.toInvalidArguments("Expected number for byteOffset", .{}); } } @@ -384,20 +384,20 @@ fn ptr_( } if (addr > @intFromPtr(array_buffer.ptr) + @as(usize, array_buffer.byte_len)) { - return JSC.toInvalidArguments("byteOffset out of bounds", .{}, globalThis); + return globalThis.toInvalidArguments("byteOffset out of bounds", .{}); } } if (addr > max_addressable_memory) { - return JSC.toInvalidArguments("Pointer is outside max addressible memory, which usually means a bug in your program.", .{}, globalThis); + return globalThis.toInvalidArguments("Pointer is outside max addressible memory, which usually means a bug in your program.", .{}); } if (addr == 0) { - return JSC.toInvalidArguments("Pointer must not be 0", .{}, globalThis); + return globalThis.toInvalidArguments("Pointer must not be 0", .{}); } if (addr == 0xDEADBEEF or addr == 0xaaaaaaaa or addr == 0xAAAAAAAA) { - return JSC.toInvalidArguments("ptr to invalid memory, that would segfault Bun :(", .{}, globalThis); + return globalThis.toInvalidArguments("ptr to invalid memory, that would segfault Bun :(", .{}); } if (comptime Environment.allow_assert) { @@ -414,16 +414,16 @@ const ValueOrError = union(enum) { pub fn getPtrSlice(globalThis: *JSGlobalObject, value: JSValue, byteOffset: ?JSValue, byteLength: ?JSValue) ValueOrError { if (!value.isNumber()) { - return .{ .err = JSC.toInvalidArguments("ptr must be a number.", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("ptr must be a number.", .{}) }; } const num = value.asPtrAddress(); if (num == 0) { - return .{ .err = JSC.toInvalidArguments("ptr cannot be zero, that would segfault Bun :(", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("ptr cannot be zero, that would segfault Bun :(", .{}) }; } // if (!std.math.isFinite(num)) { - // return .{ .err = JSC.toInvalidArguments("ptr must be a finite number.", .{}, globalThis) }; + // return .{ .err = globalThis.toInvalidArguments("ptr must be a finite number.", .{}) }; // } var addr = @as(usize, @bitCast(num)); @@ -438,40 +438,40 @@ pub fn 
getPtrSlice(globalThis: *JSGlobalObject, value: JSValue, byteOffset: ?JSV } if (addr == 0) { - return .{ .err = JSC.toInvalidArguments("ptr cannot be zero, that would segfault Bun :(", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("ptr cannot be zero, that would segfault Bun :(", .{}) }; } if (!std.math.isFinite(byte_off.asNumber())) { - return .{ .err = JSC.toInvalidArguments("ptr must be a finite number.", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("ptr must be a finite number.", .{}) }; } } else if (!byte_off.isEmptyOrUndefinedOrNull()) { // do nothing } else { - return .{ .err = JSC.toInvalidArguments("Expected number for byteOffset", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("Expected number for byteOffset", .{}) }; } } if (addr == 0xDEADBEEF or addr == 0xaaaaaaaa or addr == 0xAAAAAAAA) { - return .{ .err = JSC.toInvalidArguments("ptr to invalid memory, that would segfault Bun :(", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("ptr to invalid memory, that would segfault Bun :(", .{}) }; } if (byteLength) |valueLength| { if (!valueLength.isEmptyOrUndefinedOrNull()) { if (!valueLength.isNumber()) { - return .{ .err = JSC.toInvalidArguments("length must be a number.", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("length must be a number.", .{}) }; } if (valueLength.asNumber() == 0.0) { - return .{ .err = JSC.toInvalidArguments("length must be > 0. This usually means a bug in your code.", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("length must be > 0. This usually means a bug in your code.", .{}) }; } const length_i = valueLength.toInt64(); if (length_i < 0) { - return .{ .err = JSC.toInvalidArguments("length must be > 0. This usually means a bug in your code.", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("length must be > 0. This usually means a bug in your code.", .{}) }; } if (length_i > max_addressable_memory) { - return .{ .err = JSC.toInvalidArguments("length exceeds max addressable memory. This usually means a bug in your code.", .{}, globalThis) }; + return .{ .err = globalThis.toInvalidArguments("length exceeds max addressable memory. 
This usually means a bug in your code.", .{}) }; } const length = @as(usize, @intCast(length_i)); @@ -520,17 +520,17 @@ pub fn toArrayBuffer( if (getCPtr(ctx_value)) |ctx_ptr| { ctx = @as(*anyopaque, @ptrFromInt(ctx_ptr)); } else if (!ctx_value.isUndefinedOrNull()) { - return JSC.toInvalidArguments("Expected user data to be a C pointer (number or BigInt)", .{}, globalThis); + return globalThis.toInvalidArguments("Expected user data to be a C pointer (number or BigInt)", .{}); } } } else if (!callback_value.isEmptyOrUndefinedOrNull()) { - return JSC.toInvalidArguments("Expected callback to be a C pointer (number or BigInt)", .{}, globalThis); + return globalThis.toInvalidArguments("Expected callback to be a C pointer (number or BigInt)", .{}); } } else if (finalizationCtxOrPtr) |callback_value| { if (getCPtr(callback_value)) |callback_ptr| { callback = @as(JSC.C.JSTypedArrayBytesDeallocator, @ptrFromInt(callback_ptr)); } else if (!callback_value.isEmptyOrUndefinedOrNull()) { - return JSC.toInvalidArguments("Expected callback to be a C pointer (number or BigInt)", .{}, globalThis); + return globalThis.toInvalidArguments("Expected callback to be a C pointer (number or BigInt)", .{}); } } @@ -562,17 +562,17 @@ pub fn toBuffer( if (getCPtr(ctx_value)) |ctx_ptr| { ctx = @as(*anyopaque, @ptrFromInt(ctx_ptr)); } else if (!ctx_value.isEmptyOrUndefinedOrNull()) { - return JSC.toInvalidArguments("Expected user data to be a C pointer (number or BigInt)", .{}, globalThis); + return globalThis.toInvalidArguments("Expected user data to be a C pointer (number or BigInt)", .{}); } } } else if (!callback_value.isEmptyOrUndefinedOrNull()) { - return JSC.toInvalidArguments("Expected callback to be a C pointer (number or BigInt)", .{}, globalThis); + return globalThis.toInvalidArguments("Expected callback to be a C pointer (number or BigInt)", .{}); } } else if (finalizationCtxOrPtr) |callback_value| { if (getCPtr(callback_value)) |callback_ptr| { callback = @as(JSC.C.JSTypedArrayBytesDeallocator, @ptrFromInt(callback_ptr)); } else if (!callback_value.isEmptyOrUndefinedOrNull()) { - return JSC.toInvalidArguments("Expected callback to be a C pointer (number or BigInt)", .{}, globalThis); + return globalThis.toInvalidArguments("Expected callback to be a C pointer (number or BigInt)", .{}); } } @@ -609,18 +609,14 @@ pub fn getter( } const fields = .{ - .viewSource = JSC.wrapStaticMethod( - JSC.FFI, - "print", - false, - ), - .dlopen = JSC.wrapStaticMethod(JSC.FFI, "open", false), - .callback = JSC.wrapStaticMethod(JSC.FFI, "callback", false), - .linkSymbols = JSC.wrapStaticMethod(JSC.FFI, "linkSymbols", false), - .toBuffer = JSC.wrapStaticMethod(@This(), "toBuffer", false), - .toArrayBuffer = JSC.wrapStaticMethod(@This(), "toArrayBuffer", false), - .closeCallback = JSC.wrapStaticMethod(JSC.FFI, "closeCallback", false), - .CString = JSC.wrapStaticMethod(Bun.FFIObject, "newCString", false), + .viewSource = JSC.host_fn.wrapStaticMethod(bun.api.FFI, "print", false), + .dlopen = JSC.host_fn.wrapStaticMethod(bun.api.FFI, "open", false), + .callback = JSC.host_fn.wrapStaticMethod(bun.api.FFI, "callback", false), + .linkSymbols = JSC.host_fn.wrapStaticMethod(bun.api.FFI, "linkSymbols", false), + .toBuffer = JSC.host_fn.wrapStaticMethod(@This(), "toBuffer", false), + .toArrayBuffer = JSC.host_fn.wrapStaticMethod(@This(), "toArrayBuffer", false), + .closeCallback = JSC.host_fn.wrapStaticMethod(bun.api.FFI, "closeCallback", false), + .CString = JSC.host_fn.wrapStaticMethod(bun.api.FFIObject, "newCString", false), }; const 
max_addressable_memory = std.math.maxInt(u56); @@ -628,6 +624,8 @@ const JSGlobalObject = JSC.JSGlobalObject; const JSObject = JSC.JSObject; const JSValue = JSC.JSValue; const JSC = bun.JSC; +const DOMCall = JSC.host_fn.DOMCall; +const DOMEffect = JSC.host_fn.DOMEffect; const bun = @import("bun"); const FFIObject = @This(); const Bun = JSC.API.Bun; diff --git a/src/bun.js/api/HashObject.zig b/src/bun.js/api/HashObject.zig index 8b86258806..6079a42d8f 100644 --- a/src/bun.js/api/HashObject.zig +++ b/src/bun.js/api/HashObject.zig @@ -56,12 +56,12 @@ pub fn create(globalThis: *JSC.JSGlobalObject) JSC.JSValue { return function; } -fn hashWrap(comptime Hasher_: anytype) JSC.JSHostZigFunction { +fn hashWrap(comptime Hasher_: anytype) JSC.JSHostFnZig { return struct { const Hasher = Hasher_; pub fn hash(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); var input: []const u8 = ""; diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 53a7c9923f..201bbbb803 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -5,15 +5,13 @@ const CombinedScanner = @import("../../url.zig").CombinedScanner; const bun = @import("bun"); const string = bun.string; const JSC = bun.JSC; -const WebCore = @import("../webcore/response.zig"); +const WebCore = bun.webcore; const Transpiler = bun.transpiler; const options = @import("../../options.zig"); const resolve_path = @import("../../resolver/resolve_path.zig"); const ScriptSrcStream = std.io.FixedBufferStream([]u8); const ZigString = JSC.ZigString; const Fs = @import("../../fs.zig"); -const Base = @import("../base.zig"); -const getAllocator = Base.getAllocator; const JSObject = JSC.JSObject; const JSValue = bun.JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; @@ -21,7 +19,6 @@ const strings = bun.strings; const JSError = bun.JSError; const OOM = bun.OOM; -const To = Base.To; const Request = WebCore.Request; const String = bun.String; const FetchEvent = WebCore.FetchEvent; diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index 7f665b6e8b..905868d8be 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -5,24 +5,17 @@ const CombinedScanner = @import("../../url.zig").CombinedScanner; const bun = @import("bun"); const string = bun.string; const JSC = bun.JSC; -const WebCore = @import("../webcore/response.zig"); const Transpiler = bun.transpiler; const options = @import("../../options.zig"); const ScriptSrcStream = std.io.FixedBufferStream([]u8); const ZigString = JSC.ZigString; const Fs = @import("../../fs.zig"); -const Base = @import("../base.zig"); -const getAllocator = Base.getAllocator; const JSObject = JSC.JSObject; -const JSError = Base.JSError; const JSValue = bun.JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; const strings = bun.strings; - -const To = Base.To; -const Request = WebCore.Request; - -const FetchEvent = WebCore.FetchEvent; +const Request = bun.webcore.Request; +const FetchEvent = bun.webcore.FetchEvent; const MacroMap = @import("../../resolver/package_json.zig").MacroMap; const TSConfigJSON = @import("../../resolver/tsconfig_json.zig").TSConfigJSON; const PackageJSON = @import("../../resolver/package_json.zig").PackageJSON; @@ -209,7 +202,7 @@ pub const TransformTask = 
struct { const error_value: JSValue = brk: { if (this.err) |err| { if (!this.log.hasAny()) { - break :brk JSC.BuildMessage.create( + break :brk bun.api.BuildMessage.create( this.global, bun.default_allocator, logger.Msg{ @@ -306,7 +299,7 @@ fn exportReplacementValue(value: JSValue, globalThis: *JSGlobalObject) bun.JSErr return null; } -fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: std.mem.Allocator, args: *JSC.Node.ArgumentsSlice) (bun.JSError || bun.OOM)!TranspilerOptions { +fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: std.mem.Allocator, args: *JSC.CallFrame.ArgumentsSlice) (bun.JSError || bun.OOM)!TranspilerOptions { const globalThis = globalObject; const object = args.next() orelse return TranspilerOptions{ .log = logger.Log.init(temp_allocator) }; if (object.isUndefinedOrNull()) return TranspilerOptions{ .log = logger.Log.init(temp_allocator) }; @@ -700,9 +693,9 @@ fn transformOptionsFromJSC(globalObject: *JSC.JSGlobalObject, temp_allocator: st } pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*JSTranspiler { - var temp = bun.ArenaAllocator.init(getAllocator(globalThis)); + var temp = bun.ArenaAllocator.init(bun.default_allocator); const arguments = callframe.arguments_old(3); - var args = JSC.Node.ArgumentsSlice.init( + var args = JSC.CallFrame.ArgumentsSlice.init( globalThis.bunVM(), arguments.slice(), ); @@ -711,13 +704,13 @@ pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) b const transpiler_options: TranspilerOptions = if (arguments.len > 0) try transformOptionsFromJSC(globalThis, temp.allocator(), &args) else - TranspilerOptions{ .log = logger.Log.init(getAllocator(globalThis)) }; + TranspilerOptions{ .log = logger.Log.init(bun.default_allocator) }; if (globalThis.hasException()) { return error.JSError; } - const allocator = getAllocator(globalThis); + const allocator = bun.default_allocator; if ((transpiler_options.log.warnings + transpiler_options.log.errors) > 0) { return globalThis.throwValue(transpiler_options.log.toJS(globalThis, allocator, "Failed to create transpiler")); @@ -826,7 +819,7 @@ fn getParseResult(this: *JSTranspiler, allocator: std.mem.Allocator, code: []con pub fn scan(this: *JSTranspiler, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); const arguments = callframe.arguments_old(3); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); defer args.deinit(); const code_arg = args.next() orelse { return globalThis.throwInvalidArgumentType("scan", "code", "string or Uint8Array"); @@ -898,7 +891,7 @@ pub fn scan(this: *JSTranspiler, globalThis: *JSC.JSGlobalObject, callframe: *JS pub fn transform(this: *JSTranspiler, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { JSC.markBinding(@src()); const arguments = callframe.arguments_old(3); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); defer args.arena.deinit(); const code_arg = args.next() orelse { return globalThis.throwInvalidArgumentType("transform", "code", "string or Uint8Array"); @@ -947,7 +940,7 @@ pub fn transformSync( JSC.markBinding(@src()); const arguments = callframe.arguments_old(3); - var args = 
JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); defer args.arena.deinit(); const code_arg = args.next() orelse { return globalThis.throwInvalidArgumentType("transformSync", "code", "string or Uint8Array"); @@ -1064,7 +1057,7 @@ fn namedExportsToJS(global: *JSGlobalObject, named_exports: *JSAst.Ast.NamedExpo return JSValue.createEmptyArray(global, 0); var named_exports_iter = named_exports.iterator(); - var stack_fallback = std.heap.stackFallback(@sizeOf(bun.String) * 32, getAllocator(global)); + var stack_fallback = std.heap.stackFallback(@sizeOf(bun.String) * 32, bun.default_allocator); var allocator = stack_fallback.get(); var names = allocator.alloc( bun.String, @@ -1108,7 +1101,7 @@ fn namedImportsToJS( pub fn scanImports(this: *JSTranspiler, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.arguments_old(2); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); defer args.deinit(); const code_arg = args.next() orelse { diff --git a/src/bun.js/api/Timer.zig b/src/bun.js/api/Timer.zig index 43e32b8d51..c589fa4883 100644 --- a/src/bun.js/api/Timer.zig +++ b/src/bun.js/api/Timer.zig @@ -8,6 +8,7 @@ const JSGlobalObject = JSC.JSGlobalObject; const Debugger = JSC.Debugger; const Environment = bun.Environment; const uv = bun.windows.libuv; +const api = bun.api; const StatWatcherScheduler = @import("../node/node_fs_stat_watcher.zig").StatWatcherScheduler; const Timer = @This(); const DNSResolver = @import("./bun/dns_resolver.zig").DNSResolver; @@ -1329,10 +1330,10 @@ pub const EventLoopTimer = struct { pub fn fire(this: *EventLoopTimer, now: *const timespec, vm: *VirtualMachine) Arm { switch (this.tag) { - .PostgresSQLConnectionTimeout => return @as(*JSC.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(), - .PostgresSQLConnectionMaxLifetime => return @as(*JSC.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", this))).onMaxLifetimeTimeout(), - .ValkeyConnectionTimeout => return @as(*JSC.API.Valkey, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(), - .ValkeyConnectionReconnect => return @as(*JSC.API.Valkey, @alignCast(@fieldParentPtr("reconnect_timer", this))).onReconnectTimer(), + .PostgresSQLConnectionTimeout => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(), + .PostgresSQLConnectionMaxLifetime => return @as(*api.Postgres.PostgresSQLConnection, @alignCast(@fieldParentPtr("max_lifetime_timer", this))).onMaxLifetimeTimeout(), + .ValkeyConnectionTimeout => return @as(*api.Valkey, @alignCast(@fieldParentPtr("timer", this))).onConnectionTimeout(), + .ValkeyConnectionReconnect => return @as(*api.Valkey, @alignCast(@fieldParentPtr("reconnect_timer", this))).onReconnectTimer(), inline else => |t| { if (@FieldType(t.Type(), "event_loop_timer") != EventLoopTimer) { @compileError(@typeName(t.Type()) ++ " has wrong type for 'event_loop_timer'"); diff --git a/src/bun.js/api/bun/dns_resolver.zig b/src/bun.js/api/bun/dns_resolver.zig index aaa344430f..276b23ac96 100644 --- a/src/bun.js/api/bun/dns_resolver.zig +++ b/src/bun.js/api/bun/dns_resolver.zig @@ -18,7 +18,7 @@ const Async = bun.Async; const GetAddrInfoAsyncCallback = fn (i32, ?*std.c.addrinfo, ?*anyopaque) 
callconv(.C) void; const INET6_ADDRSTRLEN = if (bun.Environment.isWindows) 65 else 46; const IANA_DNS_PORT = 53; -const EventLoopTimer = JSC.BunTimer.EventLoopTimer; +const EventLoopTimer = bun.api.Timer.EventLoopTimer; const timespec = bun.timespec; const LibInfo = struct { @@ -41,7 +41,7 @@ const LibInfo = struct { if (loaded) return handle; loaded = true; - handle = bun.C.dlopen("libinfo.dylib", .{ .LAZY = true, .LOCAL = true }); + handle = bun.sys.dlopen("libinfo.dylib", .{ .LAZY = true, .LOCAL = true }); if (handle == null) Output.debug("libinfo.dylib not found", .{}); return handle; @@ -51,7 +51,7 @@ const LibInfo = struct { pub fn get() ?*const GetaddrinfoAsyncStart { bun.Environment.onlyMac(); - return bun.C.dlsymWithHandle(*const GetaddrinfoAsyncStart, "getaddrinfo_async_start", getHandle); + return bun.sys.dlsymWithHandle(*const GetaddrinfoAsyncStart, "getaddrinfo_async_start", getHandle); } }.get; @@ -59,7 +59,7 @@ const LibInfo = struct { pub fn get() ?*const GetaddrinfoAsyncHandleReply { bun.Environment.onlyMac(); - return bun.C.dlsymWithHandle(*const GetaddrinfoAsyncHandleReply, "getaddrinfo_async_handle_reply", getHandle); + return bun.sys.dlsymWithHandle(*const GetaddrinfoAsyncHandleReply, "getaddrinfo_async_handle_reply", getHandle); } }.get; @@ -112,7 +112,7 @@ const LibInfo = struct { ); if (errno != 0) { - request.head.promise.rejectTask(globalThis, globalThis.createErrorInstance("getaddrinfo_async_start error: {s}", .{@tagName(bun.C.getErrno(errno))})); + request.head.promise.rejectTask(globalThis, globalThis.createErrorInstance("getaddrinfo_async_start error: {s}", .{@tagName(bun.sys.getErrno(errno))})); if (request.cache.pending_cache) this.pending_host_cache_native.used.set(request.cache.pos_in_pending); this.vm.allocator.destroy(request); @@ -809,13 +809,13 @@ pub const GetAddrInfoRequest = struct { file_poll: ?*bun.Async.FilePoll = null, machport: ?*anyopaque = null, - extern fn getaddrinfo_send_reply(*anyopaque, *const JSC.DNS.LibInfo.GetaddrinfoAsyncHandleReply) bool; + extern fn getaddrinfo_send_reply(*anyopaque, *const bun.api.DNS.LibInfo.GetaddrinfoAsyncHandleReply) bool; pub fn onMachportChange(this: *GetAddrInfoRequest) void { if (comptime !Environment.isMac) unreachable; bun.JSC.markBinding(@src()); - if (!getaddrinfo_send_reply(this.backend.libinfo.machport.?, JSC.DNS.LibInfo.getaddrinfo_async_handle_reply().?)) { + if (!getaddrinfo_send_reply(this.backend.libinfo.machport.?, bun.api.DNS.LibInfo.getaddrinfo_async_handle_reply().?)) { log("onMachportChange: getaddrinfo_send_reply failed", .{}); getAddrInfoAsyncCallback(-1, null, this); } @@ -1229,7 +1229,7 @@ pub const InternalDNS = struct { file_poll: ?*bun.Async.FilePoll = null, machport: ?*anyopaque = null, - extern fn getaddrinfo_send_reply(*anyopaque, *const JSC.DNS.LibInfo.GetaddrinfoAsyncHandleReply) bool; + extern fn getaddrinfo_send_reply(*anyopaque, *const bun.api.DNS.LibInfo.GetaddrinfoAsyncHandleReply) bool; pub fn onMachportChange(this: *Request) void { if (!getaddrinfo_send_reply(this.libinfo.machport.?, LibInfo.getaddrinfo_async_handle_reply().?)) { libinfoCallback(@intFromEnum(std.c.E.NOSYS), null, this); @@ -3436,41 +3436,41 @@ pub const DNSResolver = struct { } comptime { - const js_resolve = JSC.toJSHostFunction(globalResolve); + const js_resolve = JSC.toJSHostFn(globalResolve); @export(&js_resolve, .{ .name = "Bun__DNS__resolve" }); - const js_lookup = JSC.toJSHostFunction(globalLookup); + const js_lookup = JSC.toJSHostFn(globalLookup); @export(&js_lookup, .{ .name = "Bun__DNS__lookup" 
}); - const js_resolveTxt = JSC.toJSHostFunction(globalResolveTxt); + const js_resolveTxt = JSC.toJSHostFn(globalResolveTxt); @export(&js_resolveTxt, .{ .name = "Bun__DNS__resolveTxt" }); - const js_resolveSoa = JSC.toJSHostFunction(globalResolveSoa); + const js_resolveSoa = JSC.toJSHostFn(globalResolveSoa); @export(&js_resolveSoa, .{ .name = "Bun__DNS__resolveSoa" }); - const js_resolveMx = JSC.toJSHostFunction(globalResolveMx); + const js_resolveMx = JSC.toJSHostFn(globalResolveMx); @export(&js_resolveMx, .{ .name = "Bun__DNS__resolveMx" }); - const js_resolveNaptr = JSC.toJSHostFunction(globalResolveNaptr); + const js_resolveNaptr = JSC.toJSHostFn(globalResolveNaptr); @export(&js_resolveNaptr, .{ .name = "Bun__DNS__resolveNaptr" }); - const js_resolveSrv = JSC.toJSHostFunction(globalResolveSrv); + const js_resolveSrv = JSC.toJSHostFn(globalResolveSrv); @export(&js_resolveSrv, .{ .name = "Bun__DNS__resolveSrv" }); - const js_resolveCaa = JSC.toJSHostFunction(globalResolveCaa); + const js_resolveCaa = JSC.toJSHostFn(globalResolveCaa); @export(&js_resolveCaa, .{ .name = "Bun__DNS__resolveCaa" }); - const js_resolveNs = JSC.toJSHostFunction(globalResolveNs); + const js_resolveNs = JSC.toJSHostFn(globalResolveNs); @export(&js_resolveNs, .{ .name = "Bun__DNS__resolveNs" }); - const js_resolvePtr = JSC.toJSHostFunction(globalResolvePtr); + const js_resolvePtr = JSC.toJSHostFn(globalResolvePtr); @export(&js_resolvePtr, .{ .name = "Bun__DNS__resolvePtr" }); - const js_resolveCname = JSC.toJSHostFunction(globalResolveCname); + const js_resolveCname = JSC.toJSHostFn(globalResolveCname); @export(&js_resolveCname, .{ .name = "Bun__DNS__resolveCname" }); - const js_resolveAny = JSC.toJSHostFunction(globalResolveAny); + const js_resolveAny = JSC.toJSHostFn(globalResolveAny); @export(&js_resolveAny, .{ .name = "Bun__DNS__resolveAny" }); - const js_getGlobalServers = JSC.toJSHostFunction(getGlobalServers); + const js_getGlobalServers = JSC.toJSHostFn(getGlobalServers); @export(&js_getGlobalServers, .{ .name = "Bun__DNS__getServers" }); - const js_setGlobalServers = JSC.toJSHostFunction(setGlobalServers); + const js_setGlobalServers = JSC.toJSHostFn(setGlobalServers); @export(&js_setGlobalServers, .{ .name = "Bun__DNS__setServers" }); - const js_reverse = JSC.toJSHostFunction(globalReverse); + const js_reverse = JSC.toJSHostFn(globalReverse); @export(&js_reverse, .{ .name = "Bun__DNS__reverse" }); - const js_lookupService = JSC.toJSHostFunction(globalLookupService); + const js_lookupService = JSC.toJSHostFn(globalLookupService); @export(&js_lookupService, .{ .name = "Bun__DNS__lookupService" }); - const js_prefetchFromJS = JSC.toJSHostFunction(InternalDNS.prefetchFromJS); + const js_prefetchFromJS = JSC.toJSHostFn(InternalDNS.prefetchFromJS); @export(&js_prefetchFromJS, .{ .name = "Bun__DNS__prefetch" }); - const js_getDNSCacheStats = JSC.toJSHostFunction(InternalDNS.getDNSCacheStats); + const js_getDNSCacheStats = JSC.toJSHostFn(InternalDNS.getDNSCacheStats); @export(&js_getDNSCacheStats, .{ .name = "Bun__DNS__getCacheStats" }); } }; diff --git a/src/bun.js/api/bun/h2_frame_parser.zig b/src/bun.js/api/bun/h2_frame_parser.zig index b7735b06b0..ced78657ab 100644 --- a/src/bun.js/api/bun/h2_frame_parser.zig +++ b/src/bun.js/api/bun/h2_frame_parser.zig @@ -1,4 +1,3 @@ -const getAllocator = @import("../../base.zig").getAllocator; const bun = @import("bun"); const Output = bun.Output; const std = @import("std"); @@ -30,7 +29,7 @@ pub fn getHTTP2CommonString(globalObject: *JSC.JSGlobalObject, hpack_index: u32) 
} const JSValue = JSC.JSValue; -const BinaryType = JSC.BinaryType; +const BinaryType = JSC.ArrayBuffer.BinaryType; const MAX_WINDOW_SIZE = 2147483647; const MAX_HEADER_TABLE_SIZE = 4294967295; const MAX_STREAM_ID = 2147483647; @@ -2674,7 +2673,7 @@ pub const H2FrameParser = struct { } if (this.outStandingPings >= this.maxOutstandingPings) { - const exception = JSC.toTypeError(.HTTP2_PING_CANCEL, "HTTP2 ping cancelled", .{}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_PING_CANCEL, "HTTP2 ping cancelled", .{}); return globalObject.throwValue(exception); } @@ -2714,7 +2713,7 @@ pub const H2FrameParser = struct { defer origin_string.deinit(); const slice = origin_string.slice(); if (slice.len + 2 > 16384) { - const exception = JSC.toTypeError(.HTTP2_ORIGIN_LENGTH, "HTTP/2 ORIGIN frames are limited to 16382 bytes", .{}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_ORIGIN_LENGTH, "HTTP/2 ORIGIN frames are limited to 16382 bytes", .{}); return globalObject.throwValue(exception); } @@ -2752,12 +2751,12 @@ pub const H2FrameParser = struct { defer origin_string.deinit(); const slice = origin_string.slice(); _ = writer.writeInt(u16, @intCast(slice.len), .big) catch { - const exception = JSC.toTypeError(.HTTP2_ORIGIN_LENGTH, "HTTP/2 ORIGIN frames are limited to 16382 bytes", .{}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_ORIGIN_LENGTH, "HTTP/2 ORIGIN frames are limited to 16382 bytes", .{}); return globalObject.throwValue(exception); }; _ = writer.write(slice) catch { - const exception = JSC.toTypeError(.HTTP2_ORIGIN_LENGTH, "HTTP/2 ORIGIN frames are limited to 16382 bytes", .{}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_ORIGIN_LENGTH, "HTTP/2 ORIGIN frames are limited to 16382 bytes", .{}); return globalObject.throwValue(exception); }; } @@ -3288,16 +3287,16 @@ pub const H2FrameParser = struct { const name = name_slice.slice(); if (header_name.charAt(0) == ':') { - const exception = JSC.toTypeError(.HTTP2_INVALID_PSEUDOHEADER, "\"{s}\" is an invalid pseudoheader or is used incorrectly", .{name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_INVALID_PSEUDOHEADER, "\"{s}\" is an invalid pseudoheader or is used incorrectly", .{name}); return globalObject.throwValue(exception); } var js_value = try headers_arg.getTruthy(globalObject, name) orelse { - const exception = JSC.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{name}); return globalObject.throwValue(exception); }; const validated_name = toValidHeaderName(name, name_buffer[0..name.len]) catch { - const exception = JSC.toTypeError(.INVALID_HTTP_TOKEN, "The arguments Header name is invalid. Received {s}", .{name}, globalObject); + const exception = globalObject.toTypeError(.INVALID_HTTP_TOKEN, "The arguments Header name is invalid. 
Received {s}", .{name}); return globalObject.throwValue(exception); }; @@ -3307,7 +3306,7 @@ pub const H2FrameParser = struct { if (SingleValueHeaders.indexOf(validated_name)) |idx| { if (value_iter.len > 1 or single_value_headers[idx]) { - const exception = JSC.toTypeError(.HTTP2_HEADER_SINGLE_VALUE, "Header field \"{s}\" must only have a single value", .{validated_name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_HEADER_SINGLE_VALUE, "Header field \"{s}\" must only have a single value", .{validated_name}); return globalObject.throwValue(exception); } single_value_headers[idx] = true; @@ -3315,12 +3314,12 @@ pub const H2FrameParser = struct { while (value_iter.next()) |item| { if (item.isEmptyOrUndefinedOrNull()) { - const exception = JSC.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{validated_name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{validated_name}); return globalObject.throwValue(exception); } const value_str = item.toStringOrNull(globalObject) orelse { - const exception = JSC.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{validated_name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{validated_name}); return globalObject.throwValue(exception); }; @@ -3344,13 +3343,13 @@ pub const H2FrameParser = struct { } else { if (SingleValueHeaders.indexOf(validated_name)) |idx| { if (single_value_headers[idx]) { - const exception = JSC.toTypeError(.HTTP2_HEADER_SINGLE_VALUE, "Header field \"{s}\" must only have a single value", .{validated_name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_HEADER_SINGLE_VALUE, "Header field \"{s}\" must only have a single value", .{validated_name}); return globalObject.throwValue(exception); } single_value_headers[idx] = true; } const value_str = js_value.toStringOrNull(globalObject) orelse { - const exception = JSC.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{validated_name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_INVALID_HEADER_VALUE, "Invalid value for header \"{s}\"", .{validated_name}); return globalObject.throwValue(exception); }; @@ -3659,7 +3658,7 @@ pub const H2FrameParser = struct { return JSC.JSValue.jsNumber(stream_id); } const validated_name = toValidHeaderName(name, name_buffer[0..name.len]) catch { - const exception = JSC.toTypeError(.INVALID_HTTP_TOKEN, "The arguments Header name is invalid. Received \"{s}\"", .{name}, globalObject); + const exception = globalObject.toTypeError(.INVALID_HTTP_TOKEN, "The arguments Header name is invalid. 
Received \"{s}\"", .{name}); return globalObject.throwValue(exception); }; @@ -3700,7 +3699,7 @@ pub const H2FrameParser = struct { if (SingleValueHeaders.indexOf(validated_name)) |idx| { if (value_iter.len > 1 or single_value_headers[idx]) { if (!globalObject.hasException()) { - const exception = JSC.toTypeError(.HTTP2_HEADER_SINGLE_VALUE, "Header field \"{s}\" must only have a single value", .{validated_name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_HEADER_SINGLE_VALUE, "Header field \"{s}\" must only have a single value", .{validated_name}); return globalObject.throwValue(exception); } return .zero; @@ -3747,7 +3746,7 @@ pub const H2FrameParser = struct { log("single header {s}", .{name}); if (SingleValueHeaders.indexOf(validated_name)) |idx| { if (single_value_headers[idx]) { - const exception = JSC.toTypeError(.HTTP2_HEADER_SINGLE_VALUE, "Header field \"{s}\" must only have a single value", .{validated_name}, globalObject); + const exception = globalObject.toTypeError(.HTTP2_HEADER_SINGLE_VALUE, "Header field \"{s}\" must only have a single value", .{validated_name}); return globalObject.throwValue(exception); } single_value_headers[idx] = true; diff --git a/src/bun.js/api/bun/process.zig b/src/bun.js/api/bun/process.zig index 2fabfad1cd..f1385a5387 100644 --- a/src/bun.js/api/bun/process.zig +++ b/src/bun.js/api/bun/process.zig @@ -500,7 +500,7 @@ pub const Process = struct { .waiter_thread, .fd => { const err = std.c.kill(this.pid, signal); if (err != 0) { - const errno_ = bun.C.getErrno(err); + const errno_ = bun.sys.getErrno(err); // if the process was already killed don't throw if (errno_ != .SRCH) @@ -514,7 +514,7 @@ pub const Process = struct { .uv => |*handle| { if (handle.kill(signal).toError(.kill)) |err| { // if the process was already killed don't throw - if (err.errno != @intFromEnum(bun.C.E.SRCH)) { + if (err.errno != @intFromEnum(bun.sys.E.SRCH)) { return .{ .err = err }; } } @@ -1141,7 +1141,7 @@ pub const PosixSpawnResult = struct { pidfd_flags, ); while (true) { - switch (bun.C.getErrno(rc)) { + switch (bun.sys.getErrno(rc)) { .SUCCESS => return JSC.Maybe(PidFDType){ .result = @intCast(rc) }, .INTR => { rc = std.os.linux.pidfd_open( @@ -1825,9 +1825,9 @@ pub const sync = struct { chunks: std.ArrayList([]u8) = .{ .items = &.{}, .allocator = bun.default_allocator, .capacity = 0 }, pipe: *uv.Pipe, - err: bun.C.E = .SUCCESS, + err: bun.sys.E = .SUCCESS, context: *SyncWindowsProcess, - onDoneCallback: *const fn (*SyncWindowsProcess, tag: SyncWindowsProcess.OutFd, chunks: []const []u8, err: bun.C.E) void = &SyncWindowsProcess.onReaderDone, + onDoneCallback: *const fn (*SyncWindowsProcess, tag: SyncWindowsProcess.OutFd, chunks: []const []u8, err: bun.sys.E) void = &SyncWindowsProcess.onReaderDone, tag: SyncWindowsProcess.OutFd, pub const new = bun.TrivialNew(@This()); @@ -1840,7 +1840,7 @@ pub const sync = struct { this.chunks.append(@constCast(data)) catch bun.outOfMemory(); } - fn onError(this: *SyncWindowsPipeReader, err: bun.C.E) void { + fn onError(this: *SyncWindowsPipeReader, err: bun.sys.E) void { this.err = err; this.pipe.close(onClose); } @@ -1872,7 +1872,7 @@ pub const sync = struct { stderr: []const []u8 = &.{}, stdout: []const []u8 = &.{}, - err: bun.C.E = .SUCCESS, + err: bun.sys.E = .SUCCESS, waiting_count: u8 = 1, process: *Process, status: ?Status = null, @@ -1884,7 +1884,7 @@ pub const sync = struct { this.process.deref(); } - pub fn onReaderDone(this: *SyncWindowsProcess, tag: OutFd, chunks: []const []u8, err: bun.C.E) void { + 
pub fn onReaderDone(this: *SyncWindowsProcess, tag: OutFd, chunks: []const []u8, err: bun.sys.E) void { switch (tag) { .stderr => { this.stderr = chunks; @@ -2175,7 +2175,7 @@ pub const sync = struct { } const rc = std.c.poll(poll_fds.ptr, @intCast(poll_fds.len), -1); - switch (bun.C.getErrno(rc)) { + switch (bun.sys.getErrno(rc)) { .SUCCESS => {}, .AGAIN, .INTR => continue, else => |err| return .{ .err = bun.sys.Error.fromCode(err, .poll) }, diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 07b5325ae8..e43d68b560 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -105,7 +105,8 @@ fn alwaysAllowSSLVerifyCallback(_: c_int, _: ?*BoringSSL.X509_STORE_CTX) callcon fn normalizeHost(input: anytype) @TypeOf(input) { return input; } -const BinaryType = JSC.BinaryType; + +const BinaryType = JSC.ArrayBuffer.BinaryType; const WrappedType = enum { none, @@ -598,7 +599,7 @@ pub const Listener = struct { const opts = args.ptr[0]; if (opts.isEmptyOrUndefinedOrNull() or opts.isBoolean() or !opts.isObject()) { - return globalObject.throwValue(JSC.toInvalidArguments("Expected options object", .{}, globalObject)); + return globalObject.throwValue(globalObject.toInvalidArguments("Expected options object", .{})); } const socket_obj = try opts.get(globalObject, "socket") orelse { @@ -707,10 +708,10 @@ pub const Listener = struct { hostname_or_unix.deinit(); } - const errno = @intFromEnum(bun.C.getErrno(@as(c_int, -1))); + const errno = @intFromEnum(bun.sys.getErrno(@as(c_int, -1))); if (errno != 0) { err.put(globalObject, ZigString.static("errno"), JSValue.jsNumber(errno)); - if (bun.C.SystemErrno.init(errno)) |str| { + if (bun.sys.SystemErrno.init(errno)) |str| { err.put(globalObject, ZigString.static("code"), ZigString.init(@tagName(str)).toJS(globalObject)); } } @@ -805,7 +806,7 @@ pub const Listener = struct { err.put(globalObject, ZigString.static("errno"), JSValue.jsNumber(errno)); err.put(globalObject, ZigString.static("address"), hostname_or_unix.toZigString().toJS(globalObject)); if (port) |p| err.put(globalObject, ZigString.static("port"), .jsNumber(p)); - if (bun.C.SystemErrno.init(errno)) |str| { + if (bun.sys.SystemErrno.init(errno)) |str| { err.put(globalObject, ZigString.static("code"), ZigString.init(@tagName(str)).toJS(globalObject)); } } @@ -1253,7 +1254,7 @@ pub const Listener = struct { SocketType.js.dataSetCached(socket.getThisValue(globalObject), globalObject, default_data); socket.flags.allow_half_open = socket_config.allowHalfOpen; socket.doConnect(connection) catch { - socket.handleConnectError(@intFromEnum(if (port == null) bun.C.SystemErrno.ENOENT else bun.C.SystemErrno.ECONNREFUSED)); + socket.handleConnectError(@intFromEnum(if (port == null) bun.sys.SystemErrno.ENOENT else bun.sys.SystemErrno.ECONNREFUSED)); return promise_value; }; @@ -1640,9 +1641,9 @@ fn NewSocket(comptime ssl: bool) type { .syscall = bun.String.static("connect"), // For some reason errno is 0 which causes this to be success. 
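// Aside (illustrative note, not part of the original patch): the errno checked on the
// next line is chosen by the caller. Earlier in this file's diff the connect failure
// path reads roughly:
//   socket.doConnect(connection) catch {
//       socket.handleConnectError(@intFromEnum(
//           if (port == null) bun.sys.SystemErrno.ENOENT else bun.sys.SystemErrno.ECONNREFUSED));
//       return promise_value;
//   };
// so a unix-socket path with no listener surfaces as ENOENT and a refused TCP
// connection as ECONNREFUSED.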
// Unix socket emits ENOENT - .code = if (errno == @intFromEnum(bun.C.SystemErrno.ENOENT)) bun.String.static("ENOENT") else bun.String.static("ECONNREFUSED"), + .code = if (errno == @intFromEnum(bun.sys.SystemErrno.ENOENT)) bun.String.static("ENOENT") else bun.String.static("ECONNREFUSED"), // .code = bun.String.static(@tagName(bun.sys.getErrno(errno))), - // .code = bun.String.static(@tagName(@as(bun.C.E, @enumFromInt(errno)))), + // .code = bun.String.static(@tagName(@as(bun.sys.E, @enumFromInt(errno)))), }; vm.eventLoop().enter(); defer { @@ -2351,7 +2352,7 @@ fn NewSocket(comptime ssl: bool) type { if (written > 0) { if (remaining_in_buffered_data.len > 0) { var input_buffer = this.buffered_data_for_node_net.slice(); - bun.C.memmove(input_buffer.ptr, input_buffer.ptr[written..], remaining_in_buffered_data.len); + _ = bun.c.memmove(input_buffer.ptr, input_buffer.ptr[written..], remaining_in_buffered_data.len); this.buffered_data_for_node_net.len = @truncate(remaining_in_buffered_data.len); } } @@ -2379,7 +2380,7 @@ fn NewSocket(comptime ssl: bool) type { const len = @as(usize, @intCast(this.buffered_data_for_node_net.len)) - wrote; bun.debugAssert(len <= this.buffered_data_for_node_net.len); bun.debugAssert(len <= this.buffered_data_for_node_net.cap); - bun.C.memmove(this.buffered_data_for_node_net.ptr, this.buffered_data_for_node_net.ptr[wrote..], len); + _ = bun.c.memmove(this.buffered_data_for_node_net.ptr, this.buffered_data_for_node_net.ptr[wrote..], len); this.buffered_data_for_node_net.len = @truncate(len); } } @@ -2553,7 +2554,7 @@ fn NewSocket(comptime ssl: bool) type { if (written > 0) { if (this.buffered_data_for_node_net.len > written) { const remaining = this.buffered_data_for_node_net.slice()[written..]; - bun.C.memmove(this.buffered_data_for_node_net.ptr, remaining.ptr, remaining.len); + _ = bun.c.memmove(this.buffered_data_for_node_net.ptr, remaining.ptr, remaining.len); this.buffered_data_for_node_net.len = @truncate(remaining.len); } else { this.buffered_data_for_node_net.deinitWithAllocator(bun.default_allocator); @@ -3883,7 +3884,7 @@ pub const DuplexUpgradeContext = struct { } } else { if (this.tls) |tls| { - tls.handleConnectError(@intFromEnum(bun.C.SystemErrno.ECONNREFUSED)); + tls.handleConnectError(@intFromEnum(bun.sys.SystemErrno.ECONNREFUSED)); } } } @@ -3916,7 +3917,7 @@ pub const DuplexUpgradeContext = struct { bun.outOfMemory(); }, else => { - const errno = @intFromEnum(bun.C.SystemErrno.ECONNREFUSED); + const errno = @intFromEnum(bun.sys.SystemErrno.ECONNREFUSED); if (this.tls) |tls| { const socket = TLSSocket.Socket.fromDuplex(&this.upgrade); @@ -4281,10 +4282,10 @@ pub const WindowsNamedPipeContext = if (Environment.isWindows) struct { errdefer { switch (socket) { .tls => |tls| { - tls.handleConnectError(@intFromEnum(bun.C.SystemErrno.ENOENT)); + tls.handleConnectError(@intFromEnum(bun.sys.SystemErrno.ENOENT)); }, .tcp => |tcp| { - tcp.handleConnectError(@intFromEnum(bun.C.SystemErrno.ENOENT)); + tcp.handleConnectError(@intFromEnum(bun.sys.SystemErrno.ENOENT)); }, .none => {}, } @@ -4301,10 +4302,10 @@ pub const WindowsNamedPipeContext = if (Environment.isWindows) struct { errdefer { switch (socket) { .tls => |tls| { - tls.handleConnectError(@intFromEnum(bun.C.SystemErrno.ENOENT)); + tls.handleConnectError(@intFromEnum(bun.sys.SystemErrno.ENOENT)); }, .tcp => |tcp| { - tcp.handleConnectError(@intFromEnum(bun.C.SystemErrno.ENOENT)); + tcp.handleConnectError(@intFromEnum(bun.sys.SystemErrno.ENOENT)); }, .none => {}, } @@ -4517,7 +4518,7 @@ pub fn 
jsCreateSocketPair(global: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JS var fds_: [2]std.c.fd_t = .{ 0, 0 }; const rc = std.c.socketpair(std.posix.AF.UNIX, std.posix.SOCK.STREAM, 0, &fds_); if (rc != 0) { - const err = bun.sys.Error.fromCode(bun.C.getErrno(rc), .socketpair); + const err = bun.sys.Error.fromCode(bun.sys.getErrno(rc), .socketpair); return global.throwValue(err.toJSC(global)); } diff --git a/src/bun.js/api/bun/socket/SocketAddress.zig b/src/bun.js/api/bun/socket/SocketAddress.zig index cd299ff39f..865ef39707 100644 --- a/src/bun.js/api/bun/socket/SocketAddress.zig +++ b/src/bun.js/api/bun/socket/SocketAddress.zig @@ -412,7 +412,7 @@ pub fn address(this: *SocketAddress) bun.String { if (comptime bun.Environment.isDebug) { bun.assertWithLocation(bun.strings.isAllASCII(formatted), @src()); } - const presentation = bun.JSC.WebCore.Encoder.toBunStringComptime(formatted, .latin1); + const presentation = bun.webcore.encoding.toBunStringComptime(formatted, .latin1); bun.debugAssert(presentation.tag != .Dead); this._presentation = presentation; return presentation; diff --git a/src/bun.js/api/bun/spawn.zig b/src/bun.js/api/bun/spawn.zig index 379eea5d9a..4a8d55bbfc 100644 --- a/src/bun.js/api/bun/spawn.zig +++ b/src/bun.js/api/bun/spawn.zig @@ -128,7 +128,7 @@ pub const BunSpawn = struct { } pub fn set(self: *Attr, flags: u16) !void { - self.detached = (flags & bun.C.POSIX_SPAWN_SETSID) != 0; + self.detached = (flags & bun.c.POSIX_SPAWN_SETSID) != 0; } pub fn resetSignals(this: *Attr) !void { @@ -372,7 +372,7 @@ pub const PosixSpawn = struct { }); // Unlike most syscalls, posix_spawn returns 0 on success and an errno on failure. - // That is why bun.C.getErrno() is not used here, since that checks for -1. + // That is why bun.sys.getErrno() is not used here, since that checks for -1. 
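// Aside (illustrative sketch, not part of the original patch): the contrast described
// above, using only calls that already appear in this patch. A typical syscall signals
// failure through its return value plus errno, e.g. the socketpair path elsewhere in
// this diff:
//   const rc = std.c.socketpair(std.posix.AF.UNIX, std.posix.SOCK.STREAM, 0, &fds_);
//   if (rc != 0) {
//       const err = bun.sys.Error.fromCode(bun.sys.getErrno(rc), .socketpair);
//       return global.throwValue(err.toJSC(global));
//   }
// posix_spawn instead returns the error code itself, so the success check below
// compares the return value against 0 directly rather than calling bun.sys.getErrno().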
if (rc == 0) { return Maybe(pid_t){ .result = pid }; } diff --git a/src/bun.js/api/bun/spawn/stdio.zig b/src/bun.js/api/bun/spawn/stdio.zig index 3467915f26..33d3382345 100644 --- a/src/bun.js/api/bun/spawn/stdio.zig +++ b/src/bun.js/api/bun/spawn/stdio.zig @@ -23,7 +23,7 @@ pub const Stdio = union(enum) { to: bun.JSC.Subprocess.StdioKind, }, path: JSC.Node.PathLike, - blob: JSC.WebCore.AnyBlob, + blob: JSC.WebCore.Blob.Any, array_buffer: JSC.ArrayBuffer.Strong, memfd: bun.FileDescriptor, pipe, @@ -47,7 +47,7 @@ pub const Stdio = union(enum) { stdin_used_as_out, out_used_as_stdin, blob_used_as_out, - uv_pipe: bun.C.E, + uv_pipe: bun.sys.E, pub fn toStr(this: *const @This()) []const u8 { return switch (this.*) { @@ -400,7 +400,7 @@ pub const Stdio = union(enum) { return globalThis.throwInvalidArguments("stdio must be an array of 'inherit', 'ignore', or null", .{}); } - pub fn extractBlob(stdio: *Stdio, globalThis: *JSC.JSGlobalObject, blob: JSC.WebCore.AnyBlob, i: i32) bun.JSError!void { + pub fn extractBlob(stdio: *Stdio, globalThis: *JSC.JSGlobalObject, blob: JSC.WebCore.Blob.Any, i: i32) bun.JSError!void { const fd = bun.FD.Stdio.fromInt(i).?.fd(); if (blob.needsToReadFile()) { diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index a90f727dca..a5e50b24b2 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -34,10 +34,10 @@ ipc_data: ?IPC.IPCData, flags: Flags = .{}, weak_file_sink_stdin_ptr: ?*JSC.WebCore.FileSink = null, -abort_signal: ?*JSC.AbortSignal = null, +abort_signal: ?*webcore.AbortSignal = null, event_loop_timer_refd: bool = false, -event_loop_timer: JSC.API.Bun.Timer.EventLoopTimer = .{ +event_loop_timer: bun.api.Timer.EventLoopTimer = .{ .tag = .SubprocessTimeout, .next = .{ .sec = 0, @@ -626,7 +626,7 @@ fn setEventLoopTimerRefd(this: *Subprocess, refd: bool) void { } } -pub fn timeoutCallback(this: *Subprocess) JSC.API.Bun.Timer.EventLoopTimer.Arm { +pub fn timeoutCallback(this: *Subprocess) bun.api.Timer.EventLoopTimer.Arm { this.setEventLoopTimerRefd(false); if (this.event_loop_timer.state == .CANCELLED) return .disarm; if (this.hasExited()) { @@ -792,17 +792,17 @@ pub fn doSend(this: *Subprocess, global: *JSC.JSGlobalObject, callFrame: *JSC.Ca if (good) { if (callback.isFunction()) { - JSC.Bun__Process__queueNextTick1(global, callback, .null); + callback.callNextTick(global, .{JSValue.null}); // we need to wait until the send is actually completed to trigger the callback } } else { const ex = global.createTypeErrorInstance("process.send() failed", .{}); ex.put(global, JSC.ZigString.static("syscall"), bun.String.static("write").toJS(global)); if (callback.isFunction()) { - JSC.Bun__Process__queueNextTick1(global, callback, ex); + callback.callNextTick(global, .{ex}); } else { const fnvalue = JSC.JSFunction.create(global, "", S.impl, 1, .{}); - JSC.Bun__Process__queueNextTick1(global, fnvalue, ex); + fnvalue.callNextTick(global, .{ex}); } } @@ -873,7 +873,7 @@ pub fn getStdio( } pub const Source = union(enum) { - blob: JSC.WebCore.AnyBlob, + blob: JSC.WebCore.Blob.Any, array_buffer: JSC.ArrayBuffer.Strong, detached: void, @@ -1477,7 +1477,7 @@ const Writable = union(enum) { } return pipe.toJSWithDestructor( globalThis, - JSC.WebCore.SinkDestructor.Ptr.init(subprocess), + JSC.WebCore.Sink.DestructorPtr.init(subprocess), ); } }, @@ -2300,7 +2300,7 @@ pub fn spawnMaybeSync( ""; if (display_path.len > 0) { var systemerror = err.withPath(display_path).toSystemError(); - if (errno == .NOENT) systemerror.errno 
= -bun.C.UV_ENOENT; + if (errno == .NOENT) systemerror.errno = -bun.sys.UV_E.NOENT; return globalThis.throwValue(systemerror.toErrorInstance(globalThis)); } }, @@ -2433,7 +2433,7 @@ pub fn spawnMaybeSync( } if (subprocess.stdin == .pipe) { - subprocess.stdin.pipe.signal = JSC.WebCore.Signal.init(&subprocess.stdin); + subprocess.stdin.pipe.signal = JSC.WebCore.streams.Signal.init(&subprocess.stdin); } const out = if (comptime !is_sync) @@ -2605,7 +2605,7 @@ fn throwCommandNotFound(globalThis: *JSC.JSGlobalObject, command: []const u8) bu const err = JSC.SystemError{ .message = bun.String.createFormat("Executable not found in $PATH: \"{s}\"", .{command}) catch bun.outOfMemory(), .code = bun.String.static("ENOENT"), - .errno = -bun.C.UV_ENOENT, + .errno = -bun.sys.UV_E.NOENT, .path = bun.String.createUTF8(command), }; return globalThis.throwValue(err.toErrorInstance(globalThis)); @@ -2695,6 +2695,7 @@ const MutableString = bun.MutableString; const std = @import("std"); const Allocator = std.mem.Allocator; const JSC = bun.JSC; +const webcore = bun.webcore; const JSValue = JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; const which = bun.which; diff --git a/src/bun.js/api/bun/udp_socket.zig b/src/bun.js/api/bun/udp_socket.zig index 9adc502f95..41f58ff9ba 100644 --- a/src/bun.js/api/bun/udp_socket.zig +++ b/src/bun.js/api/bun/udp_socket.zig @@ -133,7 +133,7 @@ pub const UDPSocketConfig = struct { connect: ?ConnectConfig = null, port: u16, flags: i32, - binary_type: JSC.BinaryType = .Buffer, + binary_type: JSC.ArrayBuffer.BinaryType = .Buffer, on_data: JSValue = .zero, on_drain: JSValue = .zero, on_error: JSValue = .zero, @@ -190,7 +190,7 @@ pub const UDPSocketConfig = struct { return globalThis.throwInvalidArguments("Expected \"socket.binaryType\" to be a string", .{}); } - config.binary_type = try JSC.BinaryType.fromJSValue(globalThis, value) orelse { + config.binary_type = try JSC.ArrayBuffer.BinaryType.fromJSValue(globalThis, value) orelse { return globalThis.throwInvalidArguments("Expected \"socket.binaryType\" to be 'arraybuffer', 'uint8array', or 'buffer'", .{}); }; } @@ -333,7 +333,7 @@ pub const UDPSocket = struct { this.closed = true; defer this.deinit(); if (err != 0) { - const code = @tagName(bun.C.SystemErrno.init(@as(c_int, @intCast(err))).?); + const code = @tagName(bun.sys.SystemErrno.init(@as(c_int, @intCast(err))).?); const sys_err = JSC.SystemError{ .errno = err, .code = bun.String.static(code), @@ -570,7 +570,7 @@ pub const UDPSocket = struct { } } - return bun.JSC.Maybe(void).errno(@as(bun.C.E, @enumFromInt(std.c._errno().*)), tag); + return bun.JSC.Maybe(void).errno(@as(bun.sys.E, @enumFromInt(std.c._errno().*)), tag); } else { return bun.JSC.Maybe(void).errnoSys(res, tag); } diff --git a/src/bun.js/api/crypto/CryptoHasher.zig b/src/bun.js/api/crypto/CryptoHasher.zig index 959fbc1e1d..759d1fd167 100644 --- a/src/bun.js/api/crypto/CryptoHasher.zig +++ b/src/bun.js/api/crypto/CryptoHasher.zig @@ -128,8 +128,8 @@ pub const CryptoHasher = union(enum) { } }; - pub const digest = JSC.wrapInstanceMethod(CryptoHasher, "digest_", false); - pub const hash = JSC.wrapStaticMethod(CryptoHasher, "hash_", false); + pub const digest = JSC.host_fn.wrapInstanceMethod(CryptoHasher, "digest_", false); + pub const hash = JSC.host_fn.wrapStaticMethod(CryptoHasher, "hash_", false); fn throwHmacConsumed(globalThis: *JSC.JSGlobalObject) bun.JSError { return globalThis.throw("HMAC has been consumed and is no longer usable", .{}); @@ -669,8 +669,8 @@ fn StaticCryptoHasher(comptime Hasher: type, 
comptime name: [:0]const u8) type { pub const fromJS = js.fromJS; pub const fromJSDirect = js.fromJSDirect; - pub const digest = JSC.wrapInstanceMethod(ThisHasher, "digest_", false); - pub const hash = JSC.wrapStaticMethod(ThisHasher, "hash_", false); + pub const digest = host_fn.wrapInstanceMethod(ThisHasher, "digest_", false); + pub const hash = host_fn.wrapStaticMethod(ThisHasher, "hash_", false); pub fn getByteLength( _: *@This(), @@ -898,3 +898,4 @@ const EVP = Crypto.EVP; const BoringSSL = bun.BoringSSL.c; const createCryptoError = Crypto.createCryptoError; const VirtualMachine = JSC.VirtualMachine; +const host_fn = bun.jsc.host_fn; diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig index a9c158c4df..b94869b4cf 100644 --- a/src/bun.js/api/ffi.zig +++ b/src/bun.js/api/ffi.zig @@ -14,9 +14,7 @@ const Fs = @import("../../fs.zig"); const options = @import("../../options.zig"); const ZigString = bun.JSC.ZigString; const JSC = bun.JSC; -const JSError = @import("../base.zig").JSError; -const getAllocator = @import("../base.zig").getAllocator; const JSValue = bun.JSC.JSValue; const JSGlobalObject = bun.JSC.JSGlobalObject; @@ -758,10 +756,10 @@ pub const FFI = struct { function.compile(napi_env) catch |err| { if (!globalThis.hasException()) { - const ret = JSC.toInvalidArguments("{s} when translating symbol \"{s}\"", .{ + const ret = globalThis.toInvalidArguments("{s} when translating symbol \"{s}\"", .{ @errorName(err), function_name, - }, globalThis); + }); return globalThis.throwValue(ret); } return error.JSError; @@ -776,11 +774,11 @@ pub const FFI = struct { }, .compiled => |*compiled| { const str = ZigString.init(bun.asByteSlice(function_name)); - const cb = JSC.NewRuntimeFunction( + const cb = JSC.host_fn.NewRuntimeFunction( globalThis, &str, @as(u32, @intCast(function.arg_types.items.len)), - bun.cast(JSC.JSHostFunctionPtr, compiled.ptr), + bun.cast(*const JSC.JSHostFn, compiled.ptr), false, true, function.symbol_from_dynamic_library, @@ -817,11 +815,11 @@ pub const FFI = struct { pub fn callback(globalThis: *JSGlobalObject, interface: JSC.JSValue, js_callback: JSC.JSValue) JSValue { JSC.markBinding(@src()); if (!interface.isObject()) { - return JSC.toInvalidArguments("Expected object", .{}, globalThis); + return globalThis.toInvalidArguments("Expected object", .{}); } if (js_callback.isEmptyOrUndefinedOrNull() or !js_callback.isCallable()) { - return JSC.toInvalidArguments("Expected callback function", .{}, globalThis); + return globalThis.toInvalidArguments("Expected callback function", .{}); } const allocator = VirtualMachine.get().allocator; @@ -906,7 +904,7 @@ pub const FFI = struct { const allocator = VirtualMachine.get().allocator; if (object.isEmptyOrUndefinedOrNull() or !object.isObject()) { - return JSC.toInvalidArguments("Expected an object", .{}, global); + return global.toInvalidArguments("Expected an object", .{}); } var function: Function = .{ .allocator = allocator }; @@ -991,7 +989,7 @@ pub const FFI = struct { /// Creates an Exception object indicating that options object is invalid. /// The exception is not thrown on the VM. 
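// Aside (illustrative, not part of the original patch): the helper below is a compact
// example of the error-construction rewrite applied throughout this patch, where free
// functions that took the global object as a trailing argument become methods on the
// global object:
//   before: return JSC.toInvalidArguments("Expected an options object with symbol names", .{}, global);
//   after:  return global.toInvalidArguments("Expected an options object with symbol names", .{});
// The same shape of change turns JSC.toTypeError(code, fmt, args, globalObject) into
// globalObject.toTypeError(code, fmt, args).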
fn invalidOptionsArg(global: *JSGlobalObject) JSValue { - return JSC.toInvalidArguments("Expected an options object with symbol names", .{}, global); + return global.toInvalidArguments("Expected an options object with symbol names", .{}); } pub fn open(global: *JSGlobalObject, name_str: ZigString, object_value: JSC.JSValue) JSC.JSValue { @@ -1024,7 +1022,7 @@ pub const FFI = struct { }; if (name.len == 0) { - return JSC.toInvalidArguments("Invalid library name", .{}, global); + return global.toInvalidArguments("Invalid library name", .{}); } var symbols = bun.StringArrayHashMapUnmanaged(Function){}; @@ -1037,7 +1035,7 @@ pub const FFI = struct { return val; } if (symbols.count() == 0) { - return JSC.toInvalidArguments("Expected at least one symbol", .{}, global); + return global.toInvalidArguments("Expected at least one symbol", .{}); } var dylib: std.DynLib = brk: { @@ -1073,7 +1071,7 @@ pub const FFI = struct { // optional if the user passed "ptr" if (function.symbol_from_dynamic_library == null) { const resolved_symbol = dylib.lookup(*anyopaque, function_name) orelse { - const ret = JSC.toInvalidArguments("Symbol \"{s}\" not found in \"{s}\"", .{ bun.asByteSlice(function_name), name }, global); + const ret = global.toInvalidArguments("Symbol \"{s}\" not found in \"{s}\"", .{ bun.asByteSlice(function_name), name }); for (symbols.values()) |*value| { bun.default_allocator.free(@constCast(bun.asByteSlice(value.base_name.?))); value.arg_types.clearAndFree(bun.default_allocator); @@ -1087,11 +1085,11 @@ pub const FFI = struct { } function.compile(napi_env) catch |err| { - const ret = JSC.toInvalidArguments("{s} when compiling symbol \"{s}\" in \"{s}\"", .{ + const ret = global.toInvalidArguments("{s} when compiling symbol \"{s}\" in \"{s}\"", .{ bun.asByteSlice(@errorName(err)), bun.asByteSlice(function_name), name, - }, global); + }); for (symbols.values()) |*value| { value.deinit(global); } @@ -1120,11 +1118,11 @@ pub const FFI = struct { }, .compiled => |*compiled| { const str = ZigString.init(bun.asByteSlice(function_name)); - const cb = JSC.NewRuntimeFunction( + const cb = JSC.host_fn.NewRuntimeFunction( global, &str, @as(u32, @intCast(function.arg_types.items.len)), - bun.cast(JSC.JSHostFunctionPtr, compiled.ptr), + bun.cast(*const JSC.JSHostFn, compiled.ptr), false, true, function.symbol_from_dynamic_library, @@ -1167,7 +1165,7 @@ pub const FFI = struct { return val; } if (symbols.count() == 0) { - return JSC.toInvalidArguments("Expected at least one symbol", .{}, global); + return global.toInvalidArguments("Expected at least one symbol", .{}); } var obj = JSValue.createEmptyObject(global, symbols.count()); @@ -1180,7 +1178,7 @@ pub const FFI = struct { const function_name = function.base_name.?; if (function.symbol_from_dynamic_library == null) { - const ret = JSC.toInvalidArguments("Symbol for \"{s}\" not found", .{bun.asByteSlice(function_name)}, global); + const ret = global.toInvalidArguments("Symbol for \"{s}\" not found", .{bun.asByteSlice(function_name)}); for (symbols.values()) |*value| { allocator.free(@constCast(bun.asByteSlice(value.base_name.?))); value.arg_types.clearAndFree(allocator); @@ -1190,10 +1188,10 @@ pub const FFI = struct { } function.compile(napi_env) catch |err| { - const ret = JSC.toInvalidArguments("{s} when compiling symbol \"{s}\"", .{ + const ret = global.toInvalidArguments("{s} when compiling symbol \"{s}\"", .{ bun.asByteSlice(@errorName(err)), bun.asByteSlice(function_name), - }, global); + }); for (symbols.values()) |*value| { value.deinit(global); 
} @@ -1223,11 +1221,11 @@ pub const FFI = struct { .compiled => |*compiled| { const name = &ZigString.init(bun.asByteSlice(function_name)); - const cb = JSC.NewRuntimeFunction( + const cb = JSC.host_fn.NewRuntimeFunction( global, name, @as(u32, @intCast(function.arg_types.items.len)), - bun.cast(JSC.JSHostFunctionPtr, compiled.ptr), + bun.cast(*JSC.JSHostFn, compiled.ptr), false, true, function.symbol_from_dynamic_library, @@ -1290,7 +1288,7 @@ pub const FFI = struct { defer type_name.deinit(); abi_types.appendAssumeCapacity(ABIType.label.get(type_name.slice()) orelse { abi_types.clearAndFree(allocator); - return JSC.toTypeError(.INVALID_ARG_VALUE, "Unknown type {s}", .{type_name.slice()}, global); + return global.toTypeError(.INVALID_ARG_VALUE, "Unknown type {s}", .{type_name.slice()}); }); } } @@ -1322,7 +1320,7 @@ pub const FFI = struct { defer ret_slice.deinit(); return_type = ABIType.label.get(ret_slice.slice()) orelse { abi_types.clearAndFree(allocator); - return JSC.toTypeError(.INVALID_ARG_VALUE, "Unknown return type {s}", .{ret_slice.slice()}, global); + return global.toTypeError(.INVALID_ARG_VALUE, "Unknown return type {s}", .{ret_slice.slice()}); }; } @@ -1381,7 +1379,7 @@ pub const FFI = struct { const value = symbols_iter.value; if (value.isEmptyOrUndefinedOrNull()) { - return JSC.toTypeError(.INVALID_ARG_VALUE, "Expected an object for key \"{any}\"", .{prop}, global); + return global.toTypeError(.INVALID_ARG_VALUE, "Expected an object for key \"{any}\"", .{prop}); } var function: Function = .{ .allocator = allocator }; @@ -2401,8 +2399,8 @@ const CompilerRT = struct { pub fn inject(state: *TCC.State) void { state.addSymbol("memset", &memset) catch unreachable; state.addSymbol("memcpy", &memcpy) catch unreachable; - state.addSymbol("NapiHandleScope__open", &bun.JSC.napi.NapiHandleScope.NapiHandleScope__open) catch unreachable; - state.addSymbol("NapiHandleScope__close", &bun.JSC.napi.NapiHandleScope.NapiHandleScope__close) catch unreachable; + state.addSymbol("NapiHandleScope__open", &bun.api.napi.NapiHandleScope.NapiHandleScope__open) catch unreachable; + state.addSymbol("NapiHandleScope__close", &bun.api.napi.NapiHandleScope.NapiHandleScope__close) catch unreachable; state.addSymbol("JSVALUE_TO_INT64_SLOW", workaround.JSVALUE_TO_INT64) catch unreachable; state.addSymbol("JSVALUE_TO_UINT64_SLOW", workaround.JSVALUE_TO_UINT64) catch unreachable; diff --git a/src/bun.js/api/filesystem_router.zig b/src/bun.js/api/filesystem_router.zig index 64759f9abd..261dacc7e6 100644 --- a/src/bun.js/api/filesystem_router.zig +++ b/src/bun.js/api/filesystem_router.zig @@ -10,14 +10,10 @@ const Transpiler = bun.transpiler; const ScriptSrcStream = std.io.FixedBufferStream([]u8); const ZigString = JSC.ZigString; const Fs = @import("../../fs.zig"); -const Base = @import("../base.zig"); -const getAllocator = Base.getAllocator; const JSObject = JSC.JSObject; -const JSError = Base.JSError; const JSValue = JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; const strings = bun.strings; -const To = Base.To; const Request = WebCore.Request; const Environment = bun.Environment; const URLPath = @import("../../http/url_path.zig"); diff --git a/src/bun.js/api/glob.zig b/src/bun.js/api/glob.zig index 7e9b69fefe..739cbed563 100644 --- a/src/bun.js/api/glob.zig +++ b/src/bun.js/api/glob.zig @@ -2,7 +2,7 @@ const Glob = @This(); const globImpl = @import("../../glob.zig"); const GlobWalker = globImpl.BunGlobWalker; const PathLike = @import("../node/types.zig").PathLike; -const ArgumentsSlice = 
@import("../node/types.zig").ArgumentsSlice; +const ArgumentsSlice = JSC.CallFrame.ArgumentsSlice; const Syscall = @import("../../sys.zig"); const std = @import("std"); const Allocator = std.mem.Allocator; @@ -11,12 +11,10 @@ const bun = @import("bun"); const BunString = bun.String; const string = bun.string; const JSC = bun.JSC; -const JSArray = @import("../bindings/bindings.zig").JSArray; -const JSValue = @import("../bindings/bindings.zig").JSValue; -const ZigString = @import("../bindings/bindings.zig").ZigString; -const Base = @import("../base.zig"); -const JSGlobalObject = @import("../bindings/bindings.zig").JSGlobalObject; -const getAllocator = Base.getAllocator; +const JSArray = JSC.JSArray; +const JSValue = JSC.JSValue; +const ZigString = JSC.ZigString; +const JSGlobalObject = JSC.JSGlobalObject; const ResolvePath = @import("../../resolver/resolve_path.zig"); const isAllAscii = @import("../../string_immutable.zig").isAllASCII; const CodepointIterator = @import("../../string_immutable.zig").UnsignedCodepointIterator; @@ -282,10 +280,10 @@ fn makeGlobWalker( } pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*Glob { - const alloc = getAllocator(globalThis); + const alloc = bun.default_allocator; const arguments_ = callframe.arguments_old(1); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); defer arguments.deinit(); const pat_arg: JSValue = arguments.nextEat() orelse { return globalThis.throw("Glob.constructor: expected 1 arguments, got 0", .{}); @@ -327,10 +325,10 @@ fn decrPendingActivityFlag(has_pending_activity: *std.atomic.Value(usize)) void } pub fn __scan(this: *Glob, globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const alloc = getAllocator(globalThis); + const alloc = bun.default_allocator; const arguments_ = callframe.arguments_old(1); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); defer arguments.deinit(); var arena = std.heap.ArenaAllocator.init(alloc); @@ -350,10 +348,10 @@ pub fn __scan(this: *Glob, globalThis: *JSGlobalObject, callframe: *JSC.CallFram } pub fn __scanSync(this: *Glob, globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const alloc = getAllocator(globalThis); + const alloc = bun.default_allocator; const arguments_ = callframe.arguments_old(1); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); defer arguments.deinit(); var arena = std.heap.ArenaAllocator.init(alloc); @@ -376,12 +374,12 @@ pub fn __scanSync(this: *Glob, globalThis: *JSGlobalObject, callframe: *JSC.Call } pub fn match(this: *Glob, globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const alloc = getAllocator(globalThis); + const alloc = bun.default_allocator; var arena = Arena.init(alloc); defer arena.deinit(); const arguments_ = callframe.arguments_old(1); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); defer arguments.deinit(); const str_arg = arguments.nextEat() orelse { return globalThis.throw("Glob.matchString: expected 1 
arguments, got 0", .{}); diff --git a/src/bun.js/api/html_rewriter.zig b/src/bun.js/api/html_rewriter.zig index 597cf05e82..67e9b4f98b 100644 --- a/src/bun.js/api/html_rewriter.zig +++ b/src/bun.js/api/html_rewriter.zig @@ -2,14 +2,12 @@ const std = @import("std"); const bun = @import("bun"); const string = bun.string; const JSC = bun.JSC; -const WebCore = @import("../webcore/response.zig"); const ZigString = JSC.ZigString; -const Base = @import("../base.zig"); -const getAllocator = Base.getAllocator; const JSValue = JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; -const Response = WebCore.Response; +const Response = bun.webcore.Response; const LOLHTML = bun.LOLHTML; +const host_fn = JSC.host_fn; const SelectorMap = std.ArrayListUnmanaged(*LOLHTML.HTMLSelector); pub const LOLHTMLContext = struct { @@ -77,7 +75,7 @@ pub const HTMLRewriter = struct { var selector = LOLHTML.HTMLSelector.parse(selector_slice) catch return createLOLHTMLError(global); const handler_ = try ElementHandler.init(global, listener); - const handler = getAllocator(global).create(ElementHandler) catch bun.outOfMemory(); + const handler = bun.default_allocator.create(ElementHandler) catch bun.outOfMemory(); handler.* = handler_; this.builder.addElementContentHandlers( @@ -121,7 +119,7 @@ pub const HTMLRewriter = struct { ) bun.JSError!JSValue { const handler_ = try DocumentHandler.init(global, listener); - const handler = getAllocator(global).create(DocumentHandler) catch bun.outOfMemory(); + const handler = bun.default_allocator.create(DocumentHandler) catch bun.outOfMemory(); handler.* = handler_; // If this fails, subsequent calls to write or end should throw @@ -238,9 +236,9 @@ pub const HTMLRewriter = struct { return global.throwInvalidArguments("Expected Response or Body", .{}); } - pub const on = JSC.wrapInstanceMethod(HTMLRewriter, "on_", false); - pub const onDocument = JSC.wrapInstanceMethod(HTMLRewriter, "onDocument_", false); - pub const transform = JSC.wrapInstanceMethod(HTMLRewriter, "transform_", false); + pub const on = host_fn.wrapInstanceMethod(HTMLRewriter, "on_", false); + pub const onDocument = host_fn.wrapInstanceMethod(HTMLRewriter, "onDocument_", false); + pub const transform = host_fn.wrapInstanceMethod(HTMLRewriter, "transform_", false); pub const HTMLRewriterLoader = struct { rewriter: *LOLHTML.HTMLRewriter, @@ -404,7 +402,7 @@ pub const HTMLRewriter = struct { context: *LOLHTMLContext, response: *Response, response_value: JSC.Strong = .empty, - bodyValueBufferer: ?JSC.WebCore.BodyValueBufferer = null, + bodyValueBufferer: ?JSC.WebCore.Body.ValueBufferer = null, tmp_sync_error: ?*JSC.JSValue = null, // const log = bun.Output.scoped(.BufferOutputSink, false); @@ -487,7 +485,7 @@ pub const HTMLRewriter = struct { const value = original.getBodyValue(); sink.ref(); - sink.bodyValueBufferer = JSC.WebCore.BodyValueBufferer.init(sink, @ptrCast(&onFinishedBuffering), sink.global, bun.default_allocator); + sink.bodyValueBufferer = JSC.WebCore.Body.ValueBufferer.init(sink, @ptrCast(&onFinishedBuffering), sink.global, bun.default_allocator); response_js_value.ensureStillAlive(); sink.bodyValueBufferer.?.run(value) catch |buffering_error| { @@ -1137,9 +1135,9 @@ pub const TextChunk = struct { return this.contentHandler(LOLHTML.TextChunk.replace, callFrame.this(), globalObject, content, contentOptions); } - pub const before = JSC.wrapInstanceMethod(TextChunk, "before_", false); - pub const after = JSC.wrapInstanceMethod(TextChunk, "after_", false); - pub const replace = 
JSC.wrapInstanceMethod(TextChunk, "replace_", false); + pub const before = host_fn.wrapInstanceMethod(TextChunk, "before_", false); + pub const after = host_fn.wrapInstanceMethod(TextChunk, "after_", false); + pub const replace = host_fn.wrapInstanceMethod(TextChunk, "replace_", false); pub fn remove( this: *TextChunk, @@ -1314,7 +1312,7 @@ pub const DocEnd = struct { return this.contentHandler(LOLHTML.DocEnd.append, callFrame.this(), globalObject, content, contentOptions); } - pub const append = JSC.wrapInstanceMethod(DocEnd, "append_", false); + pub const append = host_fn.wrapInstanceMethod(DocEnd, "append_", false); pub fn finalize(this: *DocEnd) void { this.deref(); @@ -1391,9 +1389,9 @@ pub const Comment = struct { return this.contentHandler(LOLHTML.Comment.replace, callFrame.this(), globalObject, content, contentOptions); } - pub const before = JSC.wrapInstanceMethod(Comment, "before_", false); - pub const after = JSC.wrapInstanceMethod(Comment, "after_", false); - pub const replace = JSC.wrapInstanceMethod(Comment, "replace_", false); + pub const before = host_fn.wrapInstanceMethod(Comment, "before_", false); + pub const after = host_fn.wrapInstanceMethod(Comment, "after_", false); + pub const replace = host_fn.wrapInstanceMethod(Comment, "replace_", false); pub fn remove( this: *Comment, @@ -1541,9 +1539,9 @@ pub const EndTag = struct { return this.contentHandler(LOLHTML.EndTag.replace, callFrame.this(), globalObject, content, contentOptions); } - pub const before = JSC.wrapInstanceMethod(EndTag, "before_", false); - pub const after = JSC.wrapInstanceMethod(EndTag, "after_", false); - pub const replace = JSC.wrapInstanceMethod(EndTag, "replace_", false); + pub const before = host_fn.wrapInstanceMethod(EndTag, "before_", false); + pub const after = host_fn.wrapInstanceMethod(EndTag, "after_", false); + pub const replace = host_fn.wrapInstanceMethod(EndTag, "replace_", false); pub fn remove( this: *EndTag, @@ -1761,11 +1759,11 @@ pub const Element = struct { return callFrame.this(); } - pub const onEndTag = JSC.wrapInstanceMethod(Element, "onEndTag_", false); - pub const getAttribute = JSC.wrapInstanceMethod(Element, "getAttribute_", false); - pub const hasAttribute = JSC.wrapInstanceMethod(Element, "hasAttribute_", false); - pub const setAttribute = JSC.wrapInstanceMethod(Element, "setAttribute_", false); - pub const removeAttribute = JSC.wrapInstanceMethod(Element, "removeAttribute_", false); + pub const onEndTag = host_fn.wrapInstanceMethod(Element, "onEndTag_", false); + pub const getAttribute = host_fn.wrapInstanceMethod(Element, "getAttribute_", false); + pub const hasAttribute = host_fn.wrapInstanceMethod(Element, "hasAttribute_", false); + pub const setAttribute = host_fn.wrapInstanceMethod(Element, "setAttribute_", false); + pub const removeAttribute = host_fn.wrapInstanceMethod(Element, "removeAttribute_", false); fn contentHandler(this: *Element, comptime Callback: (fn (*LOLHTML.Element, []const u8, bool) LOLHTML.Error!void), thisObject: JSValue, globalObject: *JSGlobalObject, content: ZigString, contentOptions: ?ContentOptions) JSValue { if (this.element == null) @@ -1855,12 +1853,12 @@ pub const Element = struct { ); } - pub const before = JSC.wrapInstanceMethod(Element, "before_", false); - pub const after = JSC.wrapInstanceMethod(Element, "after_", false); - pub const prepend = JSC.wrapInstanceMethod(Element, "prepend_", false); - pub const append = JSC.wrapInstanceMethod(Element, "append_", false); - pub const replace = JSC.wrapInstanceMethod(Element, "replace_", 
false); - pub const setInnerContent = JSC.wrapInstanceMethod(Element, "setInnerContent_", false); + pub const before = host_fn.wrapInstanceMethod(Element, "before_", false); + pub const after = host_fn.wrapInstanceMethod(Element, "after_", false); + pub const prepend = host_fn.wrapInstanceMethod(Element, "prepend_", false); + pub const append = host_fn.wrapInstanceMethod(Element, "append_", false); + pub const replace = host_fn.wrapInstanceMethod(Element, "replace_", false); + pub const setInnerContent = host_fn.wrapInstanceMethod(Element, "setInnerContent_", false); /// Removes the element with all its content. pub fn remove( diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 9e8d2addb2..489e7b7686 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -2,7 +2,7 @@ const Bun = @This(); const default_allocator = bun.default_allocator; const bun = @import("bun"); const Environment = bun.Environment; -const AnyBlob = bun.JSC.WebCore.AnyBlob; +const AnyBlob = bun.webcore.Blob.Any; const Global = bun.Global; const strings = bun.strings; const string = bun.string; @@ -43,9 +43,9 @@ const Fetch = WebCore.Fetch; const HTTP = bun.http; const FetchEvent = WebCore.FetchEvent; const JSC = bun.JSC; -const MarkedArrayBuffer = @import("../base.zig").MarkedArrayBuffer; -const getAllocator = @import("../base.zig").getAllocator; +const MarkedArrayBuffer = JSC.MarkedArrayBuffer; const JSValue = bun.JSC.JSValue; +const host_fn = JSC.host_fn; const JSGlobalObject = bun.JSC.JSGlobalObject; const JSPrivateDataPtr = bun.JSC.JSPrivateDataPtr; @@ -95,7 +95,7 @@ const BlobFileContentResult = struct { { const body = try JSC.WebCore.Body.Value.fromJS(global, js_obj); if (body == .Blob and body.Blob.store != null and body.Blob.store.?.data == .file) { - var fs: JSC.Node.NodeFS = .{}; + var fs: JSC.Node.fs.NodeFS = .{}; const read = fs.readFileWithOptions(.{ .path = body.Blob.store.?.data.file.pathlike }, .sync, .null_terminated); switch (read) { .err => { @@ -116,7 +116,7 @@ const BlobFileContentResult = struct { } }; -fn getContentType(headers: ?*JSC.FetchHeaders, blob: *const JSC.WebCore.AnyBlob, allocator: std.mem.Allocator) struct { MimeType, bool, bool } { +fn getContentType(headers: ?*WebCore.FetchHeaders, blob: *const WebCore.Blob.Any, allocator: std.mem.Allocator) struct { MimeType, bool, bool } { var needs_content_type = true; var content_type_needs_free = false; @@ -183,7 +183,7 @@ fn validateRouteName(global: *JSC.JSGlobalObject, path: []const u8) !void { } fn writeHeaders( - headers: *JSC.FetchHeaders, + headers: *WebCore.FetchHeaders, comptime ssl: bool, resp_ptr: ?*uws.NewApp(ssl).Response, ) void { @@ -1232,7 +1232,7 @@ pub const ServerConfig = struct { pub fn fromJS( global: *JSC.JSGlobalObject, args: *ServerConfig, - arguments: *JSC.Node.ArgumentsSlice, + arguments: *JSC.CallFrame.ArgumentsSlice, opts: FromJSOptions, ) bun.JSError!void { const vm = arguments.vm; @@ -2229,12 +2229,12 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp ref_count: u8 = 1, response_ptr: ?*JSC.WebCore.Response = null, - blob: JSC.WebCore.AnyBlob = JSC.WebCore.AnyBlob{ .Blob = .{} }, + blob: JSC.WebCore.Blob.Any = JSC.WebCore.Blob.Any{ .Blob = .{} }, sendfile: SendfileContext = undefined, request_body_readable_stream_ref: JSC.WebCore.ReadableStream.Strong = .{}, - request_body: ?*JSC.BodyValueRef = null, + request_body: ?*WebCore.Body.Value.HiveRef = null, request_body_buf: std.ArrayListUnmanaged(u8) = .{}, request_body_content_len: usize = 0, @@ 
-2928,7 +2928,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp const val = linux.sendfile(this.sendfile.socket_fd.cast(), this.sendfile.fd.cast(), &signed_offset, this.sendfile.remain); this.sendfile.offset = @as(Blob.SizeType, @intCast(signed_offset)); - const errcode = bun.C.getErrno(val); + const errcode = bun.sys.getErrno(val); this.sendfile.remain -|= @as(Blob.SizeType, @intCast(this.sendfile.offset -| start)); @@ -2943,7 +2943,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp } else { var sbytes: std.posix.off_t = adjusted_count; const signed_offset = @as(i64, @bitCast(@as(u64, this.sendfile.offset))); - const errcode = bun.C.getErrno(std.c.sendfile( + const errcode = bun.sys.getErrno(std.c.sendfile( this.sendfile.fd.cast(), this.sendfile.socket_fd.cast(), signed_offset, @@ -3161,7 +3161,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp } } - pub fn onReadFile(this: *RequestContext, result: Blob.ReadFileResultType) void { + pub fn onReadFile(this: *RequestContext, result: Blob.read_file.ReadFileResultType) void { defer this.deref(); if (this.isAbortedOrEnded()) { @@ -3439,7 +3439,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp // we have to clone the request headers here since they will soon belong to a different request if (!request_object.hasFetchHeaders()) { - request_object.setFetchHeaders(JSC.FetchHeaders.createFromUWS(req)); + request_object.setFetchHeaders(.createFromUWS(req)); } // This object dies after the stack frame is popped @@ -3954,7 +3954,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp return; } this.ref(); - byte_stream.pipe = JSC.WebCore.Pipe.New(@This(), onPipe).init(this); + byte_stream.pipe = JSC.WebCore.Pipe.Wrap(@This(), onPipe).init(this); this.readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(stream, globalThis); this.byte_stream = byte_stream; @@ -3996,7 +3996,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp this.doRenderBlob(); } - pub fn onPipe(this: *RequestContext, stream: JSC.WebCore.StreamResult, allocator: std.mem.Allocator) void { + pub fn onPipe(this: *RequestContext, stream: JSC.WebCore.streams.Result, allocator: std.mem.Allocator) void { const stream_needs_deinit = stream == .owned or stream == .owned_and_done; const is_done = stream.isDone(); defer { @@ -4369,7 +4369,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp writeStatus(ssl_enabled, this.resp, status); } - fn doWriteHeaders(this: *RequestContext, headers: *JSC.FetchHeaders) void { + fn doWriteHeaders(this: *RequestContext, headers: *WebCore.FetchHeaders) void { writeHeaders(headers, ssl_enabled, this.resp); } @@ -4596,14 +4596,10 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp comptime { const export_prefix = "Bun__HTTPRequestContext" ++ (if (debug_mode) "Debug" else "") ++ (if (ThisServer.ssl_enabled) "TLS" else ""); - const jsonResolve = JSC.toJSHostFunction(onResolve); - @export(&jsonResolve, .{ .name = export_prefix ++ "__onResolve" }); - const jsonReject = JSC.toJSHostFunction(onReject); - @export(&jsonReject, .{ .name = export_prefix ++ "__onReject" }); - const jsonResolveStream = JSC.toJSHostFunction(onResolveStream); - @export(&jsonResolveStream, .{ .name = export_prefix ++ "__onResolveStream" }); - const jsonRejectStream = JSC.toJSHostFunction(onRejectStream); - 
@export(&jsonRejectStream, .{ .name = export_prefix ++ "__onRejectStream" }); + @export(&JSC.toJSHostFn(onResolve), .{ .name = export_prefix ++ "__onResolve" }); + @export(&JSC.toJSHostFn(onReject), .{ .name = export_prefix ++ "__onReject" }); + @export(&JSC.toJSHostFn(onResolveStream), .{ .name = export_prefix ++ "__onResolveStream" }); + @export(&JSC.toJSHostFn(onRejectStream), .{ .name = export_prefix ++ "__onRejectStream" }); } }; } @@ -5072,8 +5068,8 @@ const ServePlugins = struct { } } - pub const onResolve = JSC.toJSHostFunction(onResolveImpl); - pub const onReject = JSC.toJSHostFunction(onRejectImpl); + pub const onResolve = JSC.toJSHostFn(onResolveImpl); + pub const onReject = JSC.toJSHostFn(onRejectImpl); pub fn onResolveImpl(_: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { ctxLog("onResolve", .{}); @@ -5214,13 +5210,13 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d /// So we have to store it. user_routes: std.ArrayListUnmanaged(UserRoute) = .{}, - pub const doStop = JSC.wrapInstanceMethod(ThisServer, "stopFromJS", false); - pub const dispose = JSC.wrapInstanceMethod(ThisServer, "disposeFromJS", false); - pub const doUpgrade = JSC.wrapInstanceMethod(ThisServer, "onUpgrade", false); - pub const doPublish = JSC.wrapInstanceMethod(ThisServer, "publish", false); + pub const doStop = host_fn.wrapInstanceMethod(ThisServer, "stopFromJS", false); + pub const dispose = host_fn.wrapInstanceMethod(ThisServer, "disposeFromJS", false); + pub const doUpgrade = host_fn.wrapInstanceMethod(ThisServer, "onUpgrade", false); + pub const doPublish = host_fn.wrapInstanceMethod(ThisServer, "publish", false); pub const doReload = onReload; pub const doFetch = onFetch; - pub const doRequestIP = JSC.wrapInstanceMethod(ThisServer, "requestIP", false); + pub const doRequestIP = host_fn.wrapInstanceMethod(ThisServer, "requestIP", false); pub const doTimeout = timeout; const UserRoute = struct { @@ -5401,7 +5397,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d var data_value = JSC.JSValue.zero; // if we converted a HeadersInit to a Headers object, we need to free it - var fetch_headers_to_deref: ?*JSC.FetchHeaders = null; + var fetch_headers_to_deref: ?*WebCore.FetchHeaders = null; defer { if (fetch_headers_to_deref) |fh| { @@ -5435,9 +5431,9 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d break :getter; } - var fetch_headers_to_use: *JSC.FetchHeaders = headers_value.as(JSC.FetchHeaders) orelse brk: { + var fetch_headers_to_use: *WebCore.FetchHeaders = headers_value.as(WebCore.FetchHeaders) orelse brk: { if (headers_value.isObject()) { - if (JSC.FetchHeaders.createFromJS(globalThis, headers_value)) |fetch_headers| { + if (WebCore.FetchHeaders.createFromJS(globalThis, headers_value)) |fetch_headers| { fetch_headers_to_deref = fetch_headers; break :brk fetch_headers; } @@ -5532,7 +5528,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d var data_value = JSC.JSValue.zero; // if we converted a HeadersInit to a Headers object, we need to free it - var fetch_headers_to_deref: ?*JSC.FetchHeaders = null; + var fetch_headers_to_deref: ?*WebCore.FetchHeaders = null; defer { if (fetch_headers_to_deref) |fh| { @@ -5563,9 +5559,9 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d break :getter; } - var fetch_headers_to_use: *JSC.FetchHeaders = headers_value.as(JSC.FetchHeaders) orelse brk: { + var fetch_headers_to_use: 
*WebCore.FetchHeaders = headers_value.as(WebCore.FetchHeaders) orelse brk: { if (headers_value.isObject()) { - if (JSC.FetchHeaders.createFromJS(globalThis, headers_value)) |fetch_headers| { + if (WebCore.FetchHeaders.createFromJS(globalThis, headers_value)) |fetch_headers| { fetch_headers_to_deref = fetch_headers; break :brk fetch_headers; } @@ -5731,7 +5727,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d return globalThis.throwNotEnoughArguments("reload", 1, 0); } - var args_slice = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args_slice = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args_slice.deinit(); var new_config: ServerConfig = .{}; @@ -5767,9 +5763,9 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ZigString.init(fetch_error).toErrorInstance(ctx)); } - var headers: ?*JSC.FetchHeaders = null; + var headers: ?*WebCore.FetchHeaders = null; var method = HTTP.Method.GET; - var args = JSC.Node.ArgumentsSlice.init(ctx.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(ctx.bunVM(), arguments); defer args.deinit(); var first_arg = args.nextEat().?; @@ -5802,15 +5798,15 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d if (arguments.len >= 2 and arguments[1].isObject()) { var opts = arguments[1]; if (opts.fastGet(ctx, .method)) |method_| { - var slice_ = try method_.toSlice(ctx, getAllocator(ctx)); + var slice_ = try method_.toSlice(ctx, bun.default_allocator); defer slice_.deinit(); method = HTTP.Method.which(slice_.slice()) orelse method; } if (opts.fastGet(ctx, .headers)) |headers_| { - if (headers_.as(JSC.FetchHeaders)) |headers__| { + if (headers_.as(WebCore.FetchHeaders)) |headers__| { headers = headers__; - } else if (JSC.FetchHeaders.createFromJS(ctx, headers_)) |headers__| { + } else if (WebCore.FetchHeaders.createFromJS(ctx, headers_)) |headers__| { headers = headers__; } } @@ -5839,7 +5835,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d ); } else { const fetch_error = JSC.WebCore.Fetch.fetch_type_error_strings.get(bun.JSC.C.JSValueGetType(ctx, first_arg.asRef())); - const err = JSC.toTypeError(.INVALID_ARG_TYPE, "{s}", .{fetch_error}, ctx); + const err = ctx.toTypeError(.INVALID_ARG_TYPE, "{s}", .{fetch_error}); return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, err); } @@ -6112,7 +6108,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d .promise = .{ .strong = JSC.Strong.create(this.all_closed_promise.value(), this.globalThis), }, - .tracker = JSC.AsyncTaskTracker.init(vm), + .tracker = JSC.Debugger.AsyncTaskTracker.init(vm), }); event_loop.enqueueTask(JSC.Task.init(task)); } @@ -6337,7 +6333,7 @@ pub fn NewServer(protocol_enum: enum { http, https }, development_kind: enum { d if (comptime Environment.isLinux) { const rc: i32 = -1; const code = Sys.getErrno(rc); - if (code == bun.C.E.ACCES) { + if (code == bun.sys.E.ACCES) { error_instance = (JSC.SystemError{ .message = bun.String.init(std.fmt.bufPrint(&output_buf, "permission denied {s}:{d}", .{ tcp.hostname orelse "0.0.0.0", tcp.port }) catch "Failed to start server"), .code = bun.String.static("EACCES"), @@ -7365,7 +7361,7 @@ pub const SavedRequest = struct { pub const ServerAllConnectionsClosedTask = struct { globalObject: *JSC.JSGlobalObject, promise: JSC.JSPromise.Strong, - 
tracker: JSC.AsyncTaskTracker, + tracker: JSC.Debugger.AsyncTaskTracker, pub const new = bun.TrivialNew(@This()); diff --git a/src/bun.js/api/server/HTMLBundle.zig b/src/bun.js/api/server/HTMLBundle.zig index 0e82d73e91..2605d2ec67 100644 --- a/src/bun.js/api/server/HTMLBundle.zig +++ b/src/bun.js/api/server/HTMLBundle.zig @@ -356,8 +356,8 @@ pub const Route = struct { // Create static routes for each output file for (output_files) |*output_file| { - const blob = JSC.WebCore.AnyBlob{ .Blob = output_file.toBlob(bun.default_allocator, globalThis) catch bun.outOfMemory() }; - var headers = JSC.WebCore.Headers{ .allocator = bun.default_allocator }; + const blob = JSC.WebCore.Blob.Any{ .Blob = output_file.toBlob(bun.default_allocator, globalThis) catch bun.outOfMemory() }; + var headers = bun.http.Headers{ .allocator = bun.default_allocator }; const content_type = blob.Blob.contentTypeOrMimeType() orelse brk: { bun.debugAssert(false); // should be populated by `output_file.toBlob` break :brk output_file.loader.toMimeType(&.{}).value; diff --git a/src/bun.js/api/server/ServerWebSocket.zig b/src/bun.js/api/server/ServerWebSocket.zig index 185787c7bc..d2c8f5c188 100644 --- a/src/bun.js/api/server/ServerWebSocket.zig +++ b/src/bun.js/api/server/ServerWebSocket.zig @@ -1,14 +1,14 @@ handler: *WebSocketServer.Handler, this_value: JSValue = .zero, flags: Flags = .{}, -signal: ?*JSC.AbortSignal = null, +signal: ?*bun.webcore.AbortSignal = null, // We pack the per-socket data into this struct below const Flags = packed struct(u64) { ssl: bool = false, closed: bool = false, opened: bool = false, - binary_type: JSC.BinaryType = .Buffer, + binary_type: JSC.ArrayBuffer.BinaryType = .Buffer, packed_websocket_ptr: u57 = 0, inline fn websocket(this: Flags) uws.AnyWebSocket { @@ -1135,7 +1135,7 @@ pub fn getBinaryType( pub fn setBinaryType(this: *ServerWebSocket, globalThis: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(.C) bool { log("setBinaryType()", .{}); - const btype = JSC.BinaryType.fromJSValue(globalThis, value) catch return false; + const btype = JSC.ArrayBuffer.BinaryType.fromJSValue(globalThis, value) catch return false; switch (btype orelse // some other value which we don't support .Float64Array) { diff --git a/src/bun.js/api/server/StaticRoute.zig b/src/bun.js/api/server/StaticRoute.zig index 8b6eca8073..3815dcb9c6 100644 --- a/src/bun.js/api/server/StaticRoute.zig +++ b/src/bun.js/api/server/StaticRoute.zig @@ -91,7 +91,7 @@ pub fn fromJS(globalThis: *JSC.JSGlobalObject, argument: JSC.JSValue) bun.JSErro .Null, .Empty => { break :brk .{ - .InternalBlob = JSC.WebCore.InternalBlob{ + .InternalBlob = .{ .bytes = std.ArrayList(u8).init(bun.default_allocator), }, }; @@ -348,8 +348,8 @@ const bun = @import("bun"); const Api = @import("../../../api/schema.zig").Api; const JSC = bun.JSC; const uws = bun.uws; -const Headers = JSC.WebCore.Headers; +const Headers = bun.http.Headers; const AnyServer = JSC.API.AnyServer; -const AnyBlob = JSC.WebCore.AnyBlob; +const AnyBlob = JSC.WebCore.Blob.Any; const writeStatus = @import("../server.zig").writeStatus; const AnyResponse = uws.AnyResponse; diff --git a/src/bun.js/base.zig b/src/bun.js/base.zig deleted file mode 100644 index 6b64d19f0f..0000000000 --- a/src/bun.js/base.zig +++ /dev/null @@ -1,1545 +0,0 @@ -const std = @import("std"); -const bun = @import("bun"); -const string = bun.string; -const Output = bun.Output; -const Global = bun.Global; -const Environment = bun.Environment; -const strings = bun.strings; -const MutableString = bun.MutableString; 
-const stringZ = bun.stringZ; -const default_allocator = bun.default_allocator; -const JSC = bun.JSC; -const Test = @import("./test/jest.zig"); -const Router = @import("./api/filesystem_router.zig"); -const IdentityContext = @import("../identity_context.zig").IdentityContext; -const uws = bun.uws; -const TaggedPointerTypes = @import("../ptr.zig"); -const TaggedPointerUnion = TaggedPointerTypes.TaggedPointerUnion; -const JSError = bun.JSError; - -pub const JSValueRef = bun.JSC.C.JSValueRef; - -pub const Lifetime = enum { - allocated, - temporary, -}; - -/// Marshall a zig value into a JSValue using comptime reflection. -/// -/// - Primitives are converted to their JS equivalent. -/// - Types with `toJS` or `toJSNewlyCreated` methods have them called -/// - Slices are converted to JS arrays -/// - Enums are converted to 32-bit numbers. -pub fn toJS(globalObject: *JSC.JSGlobalObject, comptime ValueType: type, value: ValueType, comptime lifetime: Lifetime) JSC.JSValue { - const Type = comptime brk: { - var CurrentType = ValueType; - if (@typeInfo(ValueType) == .optional) { - CurrentType = @typeInfo(ValueType).optional.child; - } - break :brk if (@typeInfo(CurrentType) == .pointer and @typeInfo(CurrentType).pointer.size == .one) - @typeInfo(CurrentType).pointer.child - else - CurrentType; - }; - - if (comptime bun.trait.isNumber(Type)) { - return JSC.JSValue.jsNumberWithType(Type, if (comptime Type != ValueType) value.* else value); - } - - switch (comptime Type) { - void => return .undefined, - bool => return JSC.JSValue.jsBoolean(if (comptime Type != ValueType) value.* else value), - *JSC.JSGlobalObject => return value.toJSValue(), - []const u8, [:0]const u8, [*:0]const u8, []u8, [:0]u8, [*:0]u8 => { - return bun.String.createUTF8ForJS(globalObject, value); - }, - []const bun.String => { - defer { - for (value) |out| { - out.deref(); - } - bun.default_allocator.free(value); - } - return bun.String.toJSArray(globalObject, value); - }, - JSC.JSValue => return if (Type != ValueType) value.* else value, - - inline []const u16, []const u32, []const i16, []const i8, []const i32, []const f32 => { - var array = JSC.JSValue.createEmptyArray(globalObject, value.len); - for (value, 0..) |item, i| { - array.putIndex( - globalObject, - @truncate(i), - JSC.jsNumber(item), - ); - } - return array; - }, - - else => { - - // Recursion can stack overflow here - if (bun.trait.isSlice(Type)) { - const Child = comptime std.meta.Child(Type); - - var array = JSC.JSValue.createEmptyArray(globalObject, value.len); - for (value, 0..) |*item, i| { - const res = toJS(globalObject, *Child, item, lifetime); - if (res == .zero) return .zero; - array.putIndex( - globalObject, - @truncate(i), - res, - ); - } - return array; - } - - if (comptime @hasDecl(Type, "toJSNewlyCreated") and @typeInfo(@TypeOf(@field(Type, "toJSNewlyCreated"))).@"fn".params.len == 2) { - return value.toJSNewlyCreated(globalObject); - } - - if (comptime @hasDecl(Type, "toJS") and @typeInfo(@TypeOf(@field(Type, "toJS"))).@"fn".params.len == 2) { - return value.toJS(globalObject); - } - - // must come after toJS check in case this enum implements its own serializer. - if (@typeInfo(Type) == .@"enum") { - // FIXME: creates non-normalized integers (e.g. 
u2), which - // aren't handled by `jsNumberWithType` rn - return JSC.JSValue.jsNumberWithType(u32, @as(u32, @intFromEnum(value))); - } - - @compileError("dont know how to convert " ++ @typeName(ValueType) ++ " to JS"); - }, - } -} - -pub const Properties = struct { - pub const UTF8 = struct { - pub var filepath: string = "filepath"; - - pub const module: string = "module"; - pub const globalThis: string = "globalThis"; - pub const exports: string = "exports"; - pub const log: string = "log"; - pub const debug: string = "debug"; - pub const name: string = "name"; - pub const info: string = "info"; - pub const error_: string = "error"; - pub const warn: string = "warn"; - pub const console: string = "console"; - pub const require: string = "require"; - pub const description: string = "description"; - pub const initialize_bundled_module: string = "$$m"; - pub const load_module_function: string = "$lOaDuRcOdE$"; - pub const window: string = "window"; - pub const default: string = "default"; - pub const include: string = "include"; - - pub const env: string = "env"; - - pub const GET = "GET"; - pub const PUT = "PUT"; - pub const POST = "POST"; - pub const PATCH = "PATCH"; - pub const HEAD = "HEAD"; - pub const OPTIONS = "OPTIONS"; - - pub const navigate = "navigate"; - pub const follow = "follow"; - }; -}; - -pub const PathString = bun.PathString; - -pub fn createError( - globalThis: *JSC.JSGlobalObject, - comptime fmt: string, - args: anytype, -) JSC.JSValue { - if (comptime std.meta.fields(@TypeOf(args)).len == 0) { - var zig_str = JSC.ZigString.init(fmt); - if (comptime !strings.isAllASCII(fmt)) { - zig_str.markUTF16(); - } - - return zig_str.toErrorInstance(globalThis); - } else { - var fallback = std.heap.stackFallback(256, default_allocator); - var allocator = fallback.get(); - - const buf = std.fmt.allocPrint(allocator, fmt, args) catch unreachable; - var zig_str = JSC.ZigString.init(buf); - zig_str.detectEncoding(); - // it alwayas clones - const res = zig_str.toErrorInstance(globalThis); - allocator.free(buf); - return res; - } -} - -fn toTypeErrorWithCode( - code: []const u8, - comptime fmt: string, - args: anytype, - ctx: *JSC.JSGlobalObject, -) JSC.JSValue { - @branchHint(.cold); - var zig_str: JSC.ZigString = undefined; - if (comptime std.meta.fields(@TypeOf(args)).len == 0) { - zig_str = JSC.ZigString.init(fmt); - zig_str.detectEncoding(); - } else { - const buf = std.fmt.allocPrint(default_allocator, fmt, args) catch unreachable; - zig_str = JSC.ZigString.init(buf); - zig_str.detectEncoding(); - zig_str.mark(); - } - const code_str = JSC.ZigString.init(code); - return JSC.JSValue.createTypeError(&zig_str, &code_str, ctx); -} - -pub fn toTypeError( - code: JSC.Error, - comptime fmt: [:0]const u8, - args: anytype, - ctx: *JSC.JSGlobalObject, -) JSC.JSValue { - return code.fmt(ctx, fmt, args); -} - -pub fn toInvalidArguments( - comptime fmt: [:0]const u8, - args: anytype, - ctx: *JSC.JSGlobalObject, -) JSC.JSValue { - @branchHint(.cold); - return JSC.Error.INVALID_ARG_TYPE.fmt(ctx, fmt, args); -} - -pub fn getAllocator(_: *JSC.JSGlobalObject) std.mem.Allocator { - return default_allocator; -} - -/// Print a JSValue to stdout; this is only meant for debugging purposes -pub fn dump(value: JSC.WebCore.JSValue, globalObject: *JSC.JSGlobalObject) !void { - var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalObject }; - defer formatter.deinit(); - try Output.errorWriter().print("{}\n", .{value.toFmt(globalObject, &formatter)}); - Output.flush(); -} - -pub const JSStringList = 
std.ArrayList(JSC.C.JSStringRef); - -pub const ArrayBuffer = extern struct { - ptr: [*]u8 = undefined, - offset: usize = 0, - len: usize = 0, - byte_len: usize = 0, - typed_array_type: JSC.JSValue.JSType = .Cell, - value: JSC.JSValue = JSC.JSValue.zero, - shared: bool = false, - - // require('buffer').kMaxLength. - // keep in sync with Bun::Buffer::kMaxLength - pub const max_size = std.math.maxInt(c_uint); - - extern fn JSBuffer__fromMmap(*JSC.JSGlobalObject, addr: *anyopaque, len: usize) JSC.JSValue; - - // 4 MB or so is pretty good for mmap() - const mmap_threshold = 1024 * 1024 * 4; - - pub fn bytesPerElement(this: *const ArrayBuffer) ?u8 { - return switch (this.typed_array_type) { - .ArrayBuffer, .DataView => null, - .Uint8Array, .Uint8ClampedArray, .Int8Array => 1, - .Uint16Array, .Int16Array, .Float16Array => 2, - .Uint32Array, .Int32Array, .Float32Array => 4, - .BigUint64Array, .BigInt64Array, .Float64Array => 8, - else => null, - }; - } - - /// Only use this when reading from the file descriptor is _very_ cheap. Like, for example, an in-memory file descriptor. - /// Do not use this for pipes, however tempting it may seem. - pub fn toJSBufferFromFd(fd: bun.FileDescriptor, size: usize, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - const buffer_value = Bun__createUint8ArrayForCopy(globalObject, null, size, true); - if (buffer_value == .zero) { - return .zero; - } - - var array_buffer = buffer_value.asArrayBuffer(globalObject) orelse @panic("Unexpected"); - var bytes = array_buffer.byteSlice(); - - buffer_value.ensureStillAlive(); - - var read: isize = 0; - while (bytes.len > 0) { - switch (bun.sys.pread(fd, bytes, read)) { - .result => |amount| { - bytes = bytes[amount..]; - read += @intCast(amount); - - if (amount == 0) { - if (bytes.len > 0) { - @memset(bytes, 0); - } - break; - } - }, - .err => |err| { - return globalObject.throwValue(err.toJSC(globalObject)) catch .zero; - }, - } - } - - buffer_value.ensureStillAlive(); - - return buffer_value; - } - - extern fn ArrayBuffer__fromSharedMemfd(fd: i64, globalObject: *JSC.JSGlobalObject, byte_offset: usize, byte_length: usize, total_size: usize, JSC.JSValue.JSType) JSC.JSValue; - pub const toArrayBufferFromSharedMemfd = ArrayBuffer__fromSharedMemfd; - - pub fn toJSBufferFromMemfd(fd: bun.FileDescriptor, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { - const stat = switch (bun.sys.fstat(fd)) { - .err => |err| { - fd.close(); - return globalObject.throwValue(err.toJSC(globalObject)); - }, - .result => |fstat| fstat, - }; - - const size = stat.size; - - if (size == 0) { - fd.close(); - return createBuffer(globalObject, ""); - } - - // mmap() is kind of expensive to do - // It creates a new memory mapping. - // If there is a lot of repetitive memory allocations in a tight loop, it performs poorly. - // So we clone it when it's small. 
- if (size < mmap_threshold) { - const result = toJSBufferFromFd(fd, @intCast(size), globalObject); - fd.close(); - return result; - } - - const result = bun.sys.mmap( - null, - @intCast(@max(size, 0)), - std.posix.PROT.READ | std.posix.PROT.WRITE, - .{ .TYPE = .SHARED }, - fd, - 0, - ); - fd.close(); - - switch (result) { - .result => |buf| { - return JSBuffer__fromMmap(globalObject, buf.ptr, buf.len); - }, - .err => |err| { - return globalObject.throwValue(err.toJSC(globalObject)); - }, - } - } - - pub const Strong = struct { - array_buffer: ArrayBuffer, - held: JSC.Strong = .empty, - - pub fn clear(this: *ArrayBuffer.Strong) void { - var ref: *JSC.napi.Ref = this.ref orelse return; - ref.set(JSC.JSValue.zero); - } - - pub fn slice(this: *const ArrayBuffer.Strong) []u8 { - return this.array_buffer.slice(); - } - - pub fn deinit(this: *ArrayBuffer.Strong) void { - this.held.deinit(); - } - }; - - pub const empty = ArrayBuffer{ .offset = 0, .len = 0, .byte_len = 0, .typed_array_type = .Uint8Array, .ptr = undefined }; - - pub const name = "Bun__ArrayBuffer"; - pub const Stream = std.io.FixedBufferStream([]u8); - - pub inline fn stream(this: ArrayBuffer) Stream { - return Stream{ .pos = 0, .buf = this.slice() }; - } - - // TODO: this can throw an error! should use JSError!JSValue - pub fn create(globalThis: *JSC.JSGlobalObject, bytes: []const u8, comptime kind: JSC.JSValue.JSType) JSC.JSValue { - JSC.markBinding(@src()); - return switch (comptime kind) { - .Uint8Array => Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, false), - .ArrayBuffer => Bun__createArrayBufferForCopy(globalThis, bytes.ptr, bytes.len), - else => @compileError("Not implemented yet"), - }; - } - - pub fn createEmpty(globalThis: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType) JSC.JSValue { - JSC.markBinding(@src()); - - return switch (comptime kind) { - .Uint8Array => Bun__createUint8ArrayForCopy(globalThis, null, 0, false), - .ArrayBuffer => Bun__createArrayBufferForCopy(globalThis, null, 0), - else => @compileError("Not implemented yet"), - }; - } - - pub fn createBuffer(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSC.JSValue { - JSC.markBinding(@src()); - return Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, true); - } - - pub fn createUint8Array(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSC.JSValue { - JSC.markBinding(@src()); - return Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, false); - } - - extern "c" fn Bun__allocUint8ArrayForCopy(*JSC.JSGlobalObject, usize, **anyopaque) JSC.JSValue; - extern "c" fn Bun__allocArrayBufferForCopy(*JSC.JSGlobalObject, usize, **anyopaque) JSC.JSValue; - - pub fn alloc(global: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType, len: u32) JSError!struct { JSC.JSValue, []u8 } { - var ptr: [*]u8 = undefined; - const buf = switch (comptime kind) { - .Uint8Array => Bun__allocUint8ArrayForCopy(global, len, @ptrCast(&ptr)), - .ArrayBuffer => Bun__allocArrayBufferForCopy(global, len, @ptrCast(&ptr)), - else => @compileError("Not implemented yet"), - }; - if (buf == .zero) { - return error.JSError; - } - return .{ buf, ptr[0..len] }; - } - - extern "c" fn Bun__createUint8ArrayForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize, buffer: bool) JSC.JSValue; - extern "c" fn Bun__createArrayBufferForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize) JSC.JSValue; - - pub fn fromTypedArray(ctx: *JSC.JSGlobalObject, value: JSC.JSValue) ArrayBuffer { - var out = std.mem.zeroes(ArrayBuffer); - const was 
= value.asArrayBuffer_(ctx, &out); - bun.assert(was); - out.value = value; - return out; - } - - extern "c" fn JSArrayBuffer__fromDefaultAllocator(*JSC.JSGlobalObject, ptr: [*]u8, len: usize) JSC.JSValue; - pub fn toJSFromDefaultAllocator(globalThis: *JSC.JSGlobalObject, bytes: []u8) JSC.JSValue { - return JSArrayBuffer__fromDefaultAllocator(globalThis, bytes.ptr, bytes.len); - } - - pub fn fromDefaultAllocator(globalThis: *JSC.JSGlobalObject, bytes: []u8, comptime typed_array_type: JSC.JSValue.JSType) JSC.JSValue { - return switch (typed_array_type) { - .ArrayBuffer => JSArrayBuffer__fromDefaultAllocator(globalThis, bytes.ptr, bytes.len), - .Uint8Array => JSC.JSUint8Array.fromBytes(globalThis, bytes), - else => @compileError("Not implemented yet"), - }; - } - - pub fn fromBytes(bytes: []u8, typed_array_type: JSC.JSValue.JSType) ArrayBuffer { - return ArrayBuffer{ .offset = 0, .len = @as(u32, @intCast(bytes.len)), .byte_len = @as(u32, @intCast(bytes.len)), .typed_array_type = typed_array_type, .ptr = bytes.ptr }; - } - - pub fn toJSUnchecked(this: ArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) JSC.JSValue { - - // The reason for this is - // JSC C API returns a detached arraybuffer - // if you pass it a zero-length TypedArray - // we don't ever want to send the user a detached arraybuffer - // that's just silly. - if (this.byte_len == 0) { - if (this.typed_array_type == .ArrayBuffer) { - return create(ctx, "", .ArrayBuffer); - } - - if (this.typed_array_type == .Uint8Array) { - return create(ctx, "", .Uint8Array); - } - - // TODO: others - } - - if (this.typed_array_type == .ArrayBuffer) { - return JSC.JSValue.fromRef(JSC.C.JSObjectMakeArrayBufferWithBytesNoCopy( - ctx, - this.ptr, - this.byte_len, - MarkedArrayBuffer_deallocator, - @as(*anyopaque, @ptrFromInt(@intFromPtr(&bun.default_allocator))), - exception, - )); - } - - return JSC.JSValue.fromRef(JSC.C.JSObjectMakeTypedArrayWithBytesNoCopy( - ctx, - this.typed_array_type.toC(), - this.ptr, - this.byte_len, - MarkedArrayBuffer_deallocator, - @as(*anyopaque, @ptrFromInt(@intFromPtr(&bun.default_allocator))), - exception, - )); - } - - const log = Output.scoped(.ArrayBuffer, false); - - pub fn toJS(this: ArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) JSC.JSValue { - if (this.value != .zero) { - return this.value; - } - - // If it's not a mimalloc heap buffer, we're not going to call a deallocator - if (this.len > 0 and !bun.Mimalloc.mi_is_in_heap_region(this.ptr)) { - log("toJS but will never free: {d} bytes", .{this.len}); - - if (this.typed_array_type == .ArrayBuffer) { - return JSC.JSValue.fromRef(JSC.C.JSObjectMakeArrayBufferWithBytesNoCopy( - ctx, - this.ptr, - this.byte_len, - null, - null, - exception, - )); - } - - return JSC.JSValue.fromRef(JSC.C.JSObjectMakeTypedArrayWithBytesNoCopy( - ctx, - this.typed_array_type.toC(), - this.ptr, - this.byte_len, - null, - null, - exception, - )); - } - - return this.toJSUnchecked(ctx, exception); - } - - pub fn toJSWithContext( - this: ArrayBuffer, - ctx: *JSC.JSGlobalObject, - deallocator: ?*anyopaque, - callback: JSC.C.JSTypedArrayBytesDeallocator, - exception: JSC.C.ExceptionRef, - ) JSC.JSValue { - if (this.value != .zero) { - return this.value; - } - - if (this.typed_array_type == .ArrayBuffer) { - return JSC.JSValue.fromRef(JSC.C.JSObjectMakeArrayBufferWithBytesNoCopy( - ctx, - this.ptr, - this.byte_len, - callback, - deallocator, - exception, - )); - } - - return JSC.JSValue.fromRef(JSC.C.JSObjectMakeTypedArrayWithBytesNoCopy( - ctx, - 
this.typed_array_type.toC(), - this.ptr, - this.byte_len, - callback, - deallocator, - exception, - )); - } - - pub const fromArrayBuffer = fromTypedArray; - - /// The equivalent of - /// - /// ```js - /// new ArrayBuffer(view.buffer, view.byteOffset, view.byteLength) - /// ``` - pub inline fn byteSlice(this: *const @This()) []u8 { - return this.ptr[this.offset..][0..this.byte_len]; - } - - /// The equivalent of - /// - /// ```js - /// new ArrayBuffer(view.buffer, view.byteOffset, view.byteLength) - /// ``` - pub const slice = byteSlice; - - pub inline fn asU16(this: *const @This()) []u16 { - return std.mem.bytesAsSlice(u16, @as([*]u16, @ptrCast(@alignCast(this.ptr)))[this.offset..this.byte_len]); - } - - pub inline fn asU16Unaligned(this: *const @This()) []align(1) u16 { - return std.mem.bytesAsSlice(u16, @as([*]align(1) u16, @ptrCast(@alignCast(this.ptr)))[this.offset..this.byte_len]); - } - - pub inline fn asU32(this: *const @This()) []u32 { - return std.mem.bytesAsSlice(u32, @as([*]u32, @ptrCast(@alignCast(this.ptr)))[this.offset..this.byte_len]); - } -}; - -pub const MarkedArrayBuffer = struct { - buffer: ArrayBuffer = .{}, - allocator: ?std.mem.Allocator = null, - - pub const Stream = ArrayBuffer.Stream; - - pub inline fn stream(this: *MarkedArrayBuffer) Stream { - return this.buffer.stream(); - } - - pub fn fromTypedArray(ctx: *JSC.JSGlobalObject, value: JSC.JSValue) MarkedArrayBuffer { - return MarkedArrayBuffer{ - .allocator = null, - .buffer = ArrayBuffer.fromTypedArray(ctx, value), - }; - } - - pub fn fromArrayBuffer(ctx: *JSC.JSGlobalObject, value: JSC.JSValue) MarkedArrayBuffer { - return MarkedArrayBuffer{ - .allocator = null, - .buffer = ArrayBuffer.fromArrayBuffer(ctx, value), - }; - } - - pub fn fromString(str: []const u8, allocator: std.mem.Allocator) !MarkedArrayBuffer { - const buf = try allocator.dupe(u8, str); - return MarkedArrayBuffer.fromBytes(buf, allocator, JSC.JSValue.JSType.Uint8Array); - } - - pub fn fromJS(global: *JSC.JSGlobalObject, value: JSC.JSValue) ?MarkedArrayBuffer { - const array_buffer = value.asArrayBuffer(global) orelse return null; - return MarkedArrayBuffer{ .buffer = array_buffer, .allocator = null }; - } - - pub fn fromBytes(bytes: []u8, allocator: std.mem.Allocator, typed_array_type: JSC.JSValue.JSType) MarkedArrayBuffer { - return MarkedArrayBuffer{ - .buffer = ArrayBuffer.fromBytes(bytes, typed_array_type), - .allocator = allocator, - }; - } - - pub const empty = MarkedArrayBuffer{ - .allocator = null, - .buffer = ArrayBuffer.empty, - }; - - pub inline fn slice(this: *const @This()) []u8 { - return this.buffer.byteSlice(); - } - - pub fn destroy(this: *MarkedArrayBuffer) void { - const content = this.*; - if (this.allocator) |allocator| { - this.allocator = null; - allocator.free(content.buffer.slice()); - allocator.destroy(this); - } - } - - pub fn init(allocator: std.mem.Allocator, size: u32, typed_array_type: JSC.JSValue.JSType) !*MarkedArrayBuffer { - const bytes = try allocator.alloc(u8, size); - const container = try allocator.create(MarkedArrayBuffer); - container.* = MarkedArrayBuffer.fromBytes(bytes, allocator, typed_array_type); - return container; - } - - pub fn toNodeBuffer(this: *const MarkedArrayBuffer, ctx: *JSC.JSGlobalObject) JSC.JSValue { - return JSC.JSValue.createBufferWithCtx(ctx, this.buffer.byteSlice(), this.buffer.ptr, MarkedArrayBuffer_deallocator); - } - - pub fn toJSObjectRef(this: *const MarkedArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) bun.JSC.C.JSObjectRef { - if 
(!this.buffer.value.isEmptyOrUndefinedOrNull()) { - return this.buffer.value.asObjectRef(); - } - if (this.buffer.byte_len == 0) { - return JSC.C.JSObjectMakeTypedArray( - ctx, - this.buffer.typed_array_type.toC(), - 0, - exception, - ); - } - - return JSC.C.JSObjectMakeTypedArrayWithBytesNoCopy( - ctx, - this.buffer.typed_array_type.toC(), - this.buffer.ptr, - - this.buffer.byte_len, - MarkedArrayBuffer_deallocator, - this.buffer.ptr, - exception, - ); - } - - // TODO: refactor this - pub fn toJS(this: *const MarkedArrayBuffer, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - var exception = [_]JSC.C.JSValueRef{null}; - const obj = this.toJSObjectRef(globalObject, &exception); - - if (exception[0] != null) { - return globalObject.throwValue(JSC.JSValue.c(exception[0])) catch return .zero; - } - - return JSC.JSValue.c(obj); - } -}; - -// expensive heap reference-counted string type -// only use this for big strings -// like source code -// not little ones -pub const RefString = struct { - ptr: [*]const u8 = undefined, - len: usize = 0, - hash: Hash = 0, - impl: bun.WTF.StringImpl, - - allocator: std.mem.Allocator, - - ctx: ?*anyopaque = null, - onBeforeDeinit: ?*const Callback = null, - - pub const Hash = u32; - pub const Map = std.HashMap(Hash, *JSC.RefString, IdentityContext(Hash), 80); - - pub fn toJS(this: *RefString, global: *JSC.JSGlobalObject) JSC.JSValue { - return bun.String.init(this.impl).toJS(global); - } - - pub const Callback = fn (ctx: *anyopaque, str: *RefString) void; - - pub fn computeHash(input: []const u8) u32 { - return std.hash.XxHash32.hash(0, input); - } - - pub fn slice(this: *RefString) []const u8 { - this.ref(); - - return this.leak(); - } - - pub fn ref(this: *RefString) void { - this.impl.ref(); - } - - pub fn leak(this: RefString) []const u8 { - @setRuntimeSafety(false); - return this.ptr[0..this.len]; - } - - pub fn deref(this: *RefString) void { - this.impl.deref(); - } - - pub fn deinit(this: *RefString) void { - if (this.onBeforeDeinit) |onBeforeDeinit| { - onBeforeDeinit(this.ctx.?, this); - } - - this.allocator.free(this.leak()); - this.allocator.destroy(this); - } -}; - -pub export fn MarkedArrayBuffer_deallocator(bytes_: *anyopaque, _: *anyopaque) void { - const mimalloc = @import("../allocators/mimalloc.zig"); - // zig's memory allocator interface won't work here - // mimalloc knows the size of things - // but we don't - // if (comptime Environment.allow_assert) { - // bun.assert(mimalloc.mi_check_owned(bytes_) or - // mimalloc.mi_heap_check_owned(JSC.VirtualMachine.get().arena.heap.?, bytes_)); - // } - - mimalloc.mi_free(bytes_); -} - -pub export fn BlobArrayBuffer_deallocator(_: *anyopaque, blob: *anyopaque) void { - // zig's memory allocator interface won't work here - // mimalloc knows the size of things - // but we don't - var store = bun.cast(*JSC.WebCore.Blob.Store, blob); - store.deref(); -} - -const Expect = Test.Expect; -const DescribeScope = Test.DescribeScope; -const TestScope = Test.TestScope; -const NodeFS = JSC.Node.NodeFS; -const TextEncoder = JSC.WebCore.TextEncoder; -const TextDecoder = JSC.WebCore.TextDecoder; -const TextEncoderStreamEncoder = JSC.WebCore.TextEncoderStreamEncoder; -const HTMLRewriter = JSC.Cloudflare.HTMLRewriter; -const Element = JSC.Cloudflare.Element; -const Comment = JSC.Cloudflare.Comment; -const TextChunk = JSC.Cloudflare.TextChunk; -const DocType = JSC.Cloudflare.DocType; -const EndTag = JSC.Cloudflare.EndTag; -const DocEnd = JSC.Cloudflare.DocEnd; -const AttributeIterator = 
JSC.Cloudflare.AttributeIterator; -const Blob = JSC.WebCore.Blob; -const Server = JSC.API.Server; -const SSLServer = JSC.API.SSLServer; -const DebugServer = JSC.API.DebugServer; -const DebugSSLServer = JSC.API.DebugSSLServer; -const SHA1 = JSC.API.Bun.Crypto.SHA1; -const MD5 = JSC.API.Bun.Crypto.MD5; -const MD4 = JSC.API.Bun.Crypto.MD4; -const SHA224 = JSC.API.Bun.Crypto.SHA224; -const SHA512 = JSC.API.Bun.Crypto.SHA512; -const SHA384 = JSC.API.Bun.Crypto.SHA384; -const SHA256 = JSC.API.Bun.Crypto.SHA256; -const SHA512_256 = JSC.API.Bun.Crypto.SHA512_256; -const MD5_SHA1 = JSC.API.Bun.Crypto.MD5_SHA1; -const FFI = JSC.FFI; - -pub const JSPropertyNameIterator = struct { - array: JSC.C.JSPropertyNameArrayRef, - count: u32, - i: u32 = 0, - - pub fn next(this: *JSPropertyNameIterator) ?JSC.C.JSStringRef { - if (this.i >= this.count) return null; - const i = this.i; - this.i += 1; - - return JSC.C.JSPropertyNameArrayGetNameAtIndex(this.array, i); - } -}; - -pub const DOMEffect = struct { - reads: [4]ID = std.mem.zeroes([4]ID), - writes: [4]ID = std.mem.zeroes([4]ID), - - pub const top = DOMEffect{ - .reads = .{ ID.Heap, ID.Heap, ID.Heap, ID.Heap }, - .writes = .{ ID.Heap, ID.Heap, ID.Heap, ID.Heap }, - }; - - pub fn forRead(read: ID) DOMEffect { - return DOMEffect{ - .reads = .{ read, ID.Heap, ID.Heap, ID.Heap }, - .writes = .{ ID.Heap, ID.Heap, ID.Heap, ID.Heap }, - }; - } - - pub fn forWrite(read: ID) DOMEffect { - return DOMEffect{ - .writes = .{ read, ID.Heap, ID.Heap, ID.Heap }, - .reads = .{ ID.Heap, ID.Heap, ID.Heap, ID.Heap }, - }; - } - - pub const pure = DOMEffect{}; - - pub fn isPure(this: DOMEffect) bool { - return this.reads[0] == ID.InvalidAbstractHeap and this.writes[0] == ID.InvalidAbstractHeap; - } - - pub const ID = enum(u8) { - InvalidAbstractHeap = 0, - World, - Stack, - Heap, - Butterfly_publicLength, - Butterfly_vectorLength, - GetterSetter_getter, - GetterSetter_setter, - JSCell_cellState, - JSCell_indexingType, - JSCell_structureID, - JSCell_typeInfoFlags, - JSObject_butterfly, - JSPropertyNameEnumerator_cachedPropertyNames, - RegExpObject_lastIndex, - NamedProperties, - IndexedInt32Properties, - IndexedDoubleProperties, - IndexedContiguousProperties, - IndexedArrayStorageProperties, - DirectArgumentsProperties, - ScopeProperties, - TypedArrayProperties, - /// Used to reflect the fact that some allocations reveal object identity */ - HeapObjectCount, - RegExpState, - MathDotRandomState, - JSDateFields, - JSMapFields, - JSSetFields, - JSWeakMapFields, - WeakSetFields, - JSInternalFields, - InternalState, - CatchLocals, - Absolute, - /// DOMJIT tells the heap range with the pair of integers. */ - DOMState, - /// Use this for writes only, to indicate that this may fire watchpoints. Usually this is never directly written but instead we test to see if a node clobbers this; it just so happens that you have to write world to clobber it. */ - Watchpoint_fire, - /// Use these for reads only, just to indicate that if the world got clobbered, then this operation will not work. */ - MiscFields, - /// Use this for writes only, just to indicate that hoisting the node is invalid. This works because we don't hoist anything that has any side effects at all. 
*/ - SideState, - }; -}; - -fn DOMCallArgumentType(comptime Type: type) []const u8 { - const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; - return switch (ChildType) { - i8, u8, i16, u16, i32 => "JSC::SpecInt32Only", - u32, i64, u64 => "JSC::SpecInt52Any", - f64 => "JSC::SpecDoubleReal", - bool => "JSC::SpecBoolean", - JSC.JSString => "JSC::SpecString", - JSC.JSUint8Array => "JSC::SpecUint8Array", - else => @compileError("Unknown DOM type: " ++ @typeName(Type)), - }; -} - -fn DOMCallArgumentTypeWrapper(comptime Type: type) []const u8 { - const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; - return switch (ChildType) { - i32 => "int32_t", - f64 => "double", - u64 => "uint64_t", - i64 => "int64_t", - bool => "bool", - JSC.JSString => "JSC::JSString*", - JSC.JSUint8Array => "JSC::JSUint8Array*", - else => @compileError("Unknown DOM type: " ++ @typeName(Type)), - }; -} - -fn DOMCallResultType(comptime Type: type) []const u8 { - const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; - return switch (ChildType) { - i32 => "JSC::SpecInt32Only", - bool => "JSC::SpecBoolean", - JSC.JSString => "JSC::SpecString", - JSC.JSUint8Array => "JSC::SpecUint8Array", - JSC.JSCell => "JSC::SpecCell", - u52, i52 => "JSC::SpecInt52Any", - f64 => "JSC::SpecDoubleReal", - else => "JSC::SpecHeapTop", - }; -} - -pub fn DOMCall( - comptime class_name: string, - comptime Container: type, - comptime functionName: string, - comptime dom_effect: DOMEffect, -) type { - return extern struct { - const className = class_name; - pub const is_dom_call = true; - const Slowpath = @field(Container, functionName); - const SlowpathType = @TypeOf(@field(Container, functionName)); - - // Zig doesn't support @frameAddress(1) - // so we have to add a small wrapper fujnction - pub fn slowpath( - globalObject: *JSC.JSGlobalObject, - thisValue: JSC.JSValue, - arguments_ptr: [*]const JSC.JSValue, - arguments_len: usize, - ) callconv(JSC.conv) JSC.JSValue { - return JSC.toJSHostValue(globalObject, @field(Container, functionName)(globalObject, thisValue, arguments_ptr[0..arguments_len])); - } - - pub const fastpath = @field(Container, functionName ++ "WithoutTypeChecks"); - pub const Fastpath = @TypeOf(fastpath); - pub const Arguments = std.meta.ArgsTuple(Fastpath); - const PutFnType = *const fn (globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(.c) void; - const put_fn = @extern(PutFnType, .{ .name = className ++ "__" ++ functionName ++ "__put" }); - - pub fn put(globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) void { - put_fn(globalObject, value); - } - - pub const effect = dom_effect; - - comptime { - @export(&slowpath, .{ .name = className ++ "__" ++ functionName ++ "__slowpath" }); - @export(&fastpath, .{ .name = className ++ "__" ++ functionName ++ "__fastpath" }); - } - }; -} - -pub fn InstanceMethodType(comptime Container: type) type { - return fn (instance: *Container, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue; -} - -pub fn wrapInstanceMethod( - comptime Container: type, - comptime name: string, - comptime auto_protect: bool, -) InstanceMethodType(Container) { - return struct { - const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).@"fn"; - const Args = std.meta.ArgsTuple(FunctionType); - const eater = if (auto_protect) JSC.Node.ArgumentsSlice.protectEatNext else JSC.Node.ArgumentsSlice.nextEat; - - pub fn 
method( - this: *Container, - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const arguments = callframe.arguments_old(FunctionTypeInfo.params.len); - var iter = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); - var args: Args = undefined; - - const has_exception_ref: bool = comptime brk: { - for (FunctionTypeInfo.params) |param| { - if (param.type.? == JSC.C.ExceptionRef) { - break :brk true; - } - } - - break :brk false; - }; - var exception_value = [_]JSC.C.JSValueRef{null}; - const exception: JSC.C.ExceptionRef = if (comptime has_exception_ref) &exception_value else undefined; - - inline for (FunctionTypeInfo.params, 0..) |param, i| { - const ArgType = param.type.?; - switch (ArgType) { - *Container => { - args[i] = this; - }, - *JSC.JSGlobalObject => { - args[i] = globalThis; - }, - *JSC.CallFrame => { - args[i] = callframe; - }, - JSC.Node.StringOrBuffer => { - const arg = iter.nextEat() orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected string or buffer", .{}); - }; - args[i] = try JSC.Node.StringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected string or buffer", .{}); - }; - }, - ?JSC.Node.StringOrBuffer => { - if (iter.nextEat()) |arg| { - if (!arg.isEmptyOrUndefinedOrNull()) { - args[i] = try JSC.Node.StringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected string or buffer", .{}); - }; - } else { - args[i] = null; - } - } else { - args[i] = null; - } - }, - JSC.ArrayBuffer => { - if (iter.nextEat()) |arg| { - args[i] = arg.asArrayBuffer(globalThis) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected TypedArray", .{}); - }; - } else { - iter.deinit(); - return globalThis.throwInvalidArguments("expected TypedArray", .{}); - } - }, - ?JSC.ArrayBuffer => { - if (iter.nextEat()) |arg| { - args[i] = arg.asArrayBuffer(globalThis) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected TypedArray", .{}); - }; - } else { - args[i] = null; - } - }, - JSC.ZigString => { - var string_value = eater(&iter) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Missing argument", .{}); - }; - - if (string_value.isUndefinedOrNull()) { - iter.deinit(); - return globalThis.throwInvalidArguments("Expected string", .{}); - } - - args[i] = try string_value.getZigString(globalThis); - }, - ?JSC.Cloudflare.ContentOptions => { - if (iter.nextEat()) |content_arg| { - if (try content_arg.get(globalThis, "html")) |html_val| { - args[i] = .{ .html = html_val.toBoolean() }; - } - } else { - args[i] = null; - } - }, - *JSC.WebCore.Response => { - args[i] = (eater(&iter) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Missing Response object", .{}); - }).as(JSC.WebCore.Response) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Expected Response object", .{}); - }; - }, - *JSC.WebCore.Request => { - args[i] = (eater(&iter) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Missing Request object", .{}); - }).as(JSC.WebCore.Request) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Expected Request object", .{}); - }; - }, - JSC.JSValue => { - const val = eater(&iter) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Missing argument", .{}); - }; - args[i] = val; - }, - ?JSC.JSValue => { - 
args[i] = eater(&iter); - }, - JSC.C.ExceptionRef => { - args[i] = exception; - }, - else => @compileError("Unexpected Type " ++ @typeName(ArgType)), - } - } - - defer iter.deinit(); - - defer { - if (comptime has_exception_ref) { - if (exception_value[0] != null) { - globalThis.throwValue(exception_value[0].?.value()); - } - } - } - - return @call(.always_inline, @field(Container, name), args); - } - }.method; -} - -pub fn wrapStaticMethod( - comptime Container: type, - comptime name: string, - comptime auto_protect: bool, -) JSC.JSHostZigFunction { - return struct { - const FunctionType = @TypeOf(@field(Container, name)); - const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).@"fn"; - const Args = std.meta.ArgsTuple(FunctionType); - const eater = if (auto_protect) JSC.Node.ArgumentsSlice.protectEatNext else JSC.Node.ArgumentsSlice.nextEat; - - pub fn method( - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const arguments = callframe.arguments_old(FunctionTypeInfo.params.len); - var iter = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); - var args: Args = undefined; - - inline for (FunctionTypeInfo.params, 0..) |param, i| { - const ArgType = param.type.?; - switch (param.type.?) { - *JSC.JSGlobalObject => { - args[i] = globalThis; - }, - JSC.Node.StringOrBuffer => { - const arg = iter.nextEat() orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected string or buffer", .{}); - }; - args[i] = try JSC.Node.StringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected string or buffer", .{}); - }; - }, - ?JSC.Node.StringOrBuffer => { - if (iter.nextEat()) |arg| { - args[i] = try JSC.Node.StringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse brk: { - if (arg == .undefined) { - break :brk null; - } - - iter.deinit(); - return globalThis.throwInvalidArguments("expected string or buffer", .{}); - }; - } else { - args[i] = null; - } - }, - JSC.Node.BlobOrStringOrBuffer => { - if (iter.nextEat()) |arg| { - args[i] = try JSC.Node.BlobOrStringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected blob, string or buffer", .{}); - }; - } else { - iter.deinit(); - return globalThis.throwInvalidArguments("expected blob, string or buffer", .{}); - } - }, - JSC.ArrayBuffer => { - if (iter.nextEat()) |arg| { - args[i] = arg.asArrayBuffer(globalThis) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected TypedArray", .{}); - }; - } else { - iter.deinit(); - return globalThis.throwInvalidArguments("expected TypedArray", .{}); - } - }, - ?JSC.ArrayBuffer => { - if (iter.nextEat()) |arg| { - args[i] = arg.asArrayBuffer(globalThis) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("expected TypedArray", .{}); - }; - } else { - args[i] = null; - } - }, - JSC.ZigString => { - var string_value = eater(&iter) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Missing argument", .{}); - }; - - if (string_value.isUndefinedOrNull()) { - iter.deinit(); - return globalThis.throwInvalidArguments("Expected string", .{}); - } - - args[i] = try string_value.getZigString(globalThis); - }, - ?JSC.Cloudflare.ContentOptions => { - if (iter.nextEat()) |content_arg| { - if (try content_arg.get(globalThis, "html")) |html_val| { - args[i] = .{ .html = html_val.toBoolean() }; - } - } else { - 
args[i] = null; - } - }, - *JSC.WebCore.Response => { - args[i] = (eater(&iter) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Missing Response object", .{}); - }).as(JSC.WebCore.Response) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Expected Response object", .{}); - }; - }, - *JSC.WebCore.Request => { - args[i] = (eater(&iter) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Missing Request object", .{}); - }).as(JSC.WebCore.Request) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Expected Request object", .{}); - }; - }, - JSC.WebCore.JSValue => { - const val = eater(&iter) orelse { - iter.deinit(); - return globalThis.throwInvalidArguments("Missing argument", .{}); - }; - args[i] = val; - }, - ?JSC.WebCore.JSValue => { - args[i] = eater(&iter); - }, - else => @compileError(std.fmt.comptimePrint("Unexpected Type " ++ @typeName(ArgType) ++ " at argument {d} in {s}#{s}", .{ i, @typeName(Container), name })), - } - } - - defer iter.deinit(); - - return @call(.always_inline, @field(Container, name), args); - } - }.method; -} - -/// Track whether an object should keep the event loop alive -pub const Ref = struct { - has: bool = false, - - pub fn init() Ref { - return .{}; - } - - pub fn unref(this: *Ref, vm: *JSC.VirtualMachine) void { - if (!this.has) - return; - this.has = false; - vm.active_tasks -= 1; - } - - pub fn ref(this: *Ref, vm: *JSC.VirtualMachine) void { - if (this.has) - return; - this.has = true; - vm.active_tasks += 1; - } -}; - -pub const Strong = @import("./Strong.zig"); -pub const Weak = @import("./Weak.zig").Weak; -pub const WeakRefType = @import("./Weak.zig").WeakRefType; - -pub const BinaryType = enum(u4) { - Buffer, - ArrayBuffer, - Uint8Array, - Uint16Array, - Uint32Array, - Int8Array, - Int16Array, - Int32Array, - Float16Array, - Float32Array, - Float64Array, - // DataView, - - pub fn toJSType(this: BinaryType) JSC.JSValue.JSType { - return switch (this) { - .ArrayBuffer => .ArrayBuffer, - .Buffer => .Uint8Array, - // .DataView => .DataView, - .Float32Array => .Float32Array, - .Float16Array => .Float16Array, - .Float64Array => .Float64Array, - .Int16Array => .Int16Array, - .Int32Array => .Int32Array, - .Int8Array => .Int8Array, - .Uint16Array => .Uint16Array, - .Uint32Array => .Uint32Array, - .Uint8Array => .Uint8Array, - }; - } - - pub fn toTypedArrayType(this: BinaryType) JSC.C.JSTypedArrayType { - return this.toJSType().toC(); - } - - pub const Map = bun.ComptimeStringMap( - BinaryType, - .{ - .{ "ArrayBuffer", .ArrayBuffer }, - .{ "Buffer", .Buffer }, - // .{ "DataView", .DataView }, - .{ "Float32Array", .Float32Array }, - .{ "Float16Array", .Float16Array }, - .{ "Float64Array", .Float64Array }, - .{ "Int16Array", .Int16Array }, - .{ "Int32Array", .Int32Array }, - .{ "Int8Array", .Int8Array }, - .{ "Uint16Array", .Uint16Array }, - .{ "Uint32Array", .Uint32Array }, - .{ "Uint8Array", .Uint8Array }, - .{ "arraybuffer", .ArrayBuffer }, - .{ "buffer", .Buffer }, - // .{ "dataview", .DataView }, - .{ "float16array", .Float16Array }, - .{ "float32array", .Float32Array }, - .{ "float64array", .Float64Array }, - .{ "int16array", .Int16Array }, - .{ "int32array", .Int32Array }, - .{ "int8array", .Int8Array }, - .{ "nodebuffer", .Buffer }, - .{ "uint16array", .Uint16Array }, - .{ "uint32array", .Uint32Array }, - .{ "uint8array", .Uint8Array }, - }, - ); - - pub fn fromString(input: []const u8) ?BinaryType { - return Map.get(input); - } - - pub fn fromJSValue(globalThis: 
*JSC.JSGlobalObject, input: JSC.JSValue) bun.JSError!?BinaryType { - if (input.isString()) { - return Map.getWithEql(try input.toBunString(globalThis), bun.String.eqlComptime); - } - - return null; - } - - /// This clones bytes - pub fn toJS(this: BinaryType, bytes: []const u8, globalThis: *JSC.JSGlobalObject) JSC.JSValue { - switch (this) { - .Buffer => return JSC.ArrayBuffer.createBuffer(globalThis, bytes), - .ArrayBuffer => return JSC.ArrayBuffer.create(globalThis, bytes, .ArrayBuffer), - .Uint8Array => return JSC.ArrayBuffer.create(globalThis, bytes, .Uint8Array), - - // These aren't documented, but they are supported - .Uint16Array, .Uint32Array, .Int8Array, .Int16Array, .Int32Array, .Float16Array, .Float32Array, .Float64Array => { - const buffer = JSC.ArrayBuffer.create(globalThis, bytes, .ArrayBuffer); - return JSC.JSValue.c(JSC.C.JSObjectMakeTypedArrayWithArrayBuffer(globalThis, this.toTypedArrayType(), buffer.asObjectRef(), null)); - }, - } - } -}; - -pub const AsyncTaskTracker = struct { - id: u64, - - pub fn init(vm: *JSC.VirtualMachine) AsyncTaskTracker { - return .{ .id = vm.nextAsyncTaskID() }; - } - - pub fn didSchedule(this: AsyncTaskTracker, globalObject: *JSC.JSGlobalObject) void { - if (this.id == 0) return; - - bun.JSC.Debugger.didScheduleAsyncCall(globalObject, bun.JSC.Debugger.AsyncCallType.EventListener, this.id, true); - } - - pub fn didCancel(this: AsyncTaskTracker, globalObject: *JSC.JSGlobalObject) void { - if (this.id == 0) return; - - bun.JSC.Debugger.didCancelAsyncCall(globalObject, bun.JSC.Debugger.AsyncCallType.EventListener, this.id); - } - - pub fn willDispatch(this: AsyncTaskTracker, globalObject: *JSC.JSGlobalObject) void { - if (this.id == 0) { - return; - } - - bun.JSC.Debugger.willDispatchAsyncCall(globalObject, bun.JSC.Debugger.AsyncCallType.EventListener, this.id); - } - - pub fn didDispatch(this: AsyncTaskTracker, globalObject: *JSC.JSGlobalObject) void { - if (this.id == 0) { - return; - } - - bun.JSC.Debugger.didDispatchAsyncCall(globalObject, bun.JSC.Debugger.AsyncCallType.EventListener, this.id); - } -}; - -pub const MemoryReportingAllocator = struct { - child_allocator: std.mem.Allocator, - memory_cost: std.atomic.Value(usize) = std.atomic.Value(usize).init(0), - const log = Output.scoped(.MEM, false); - - fn alloc(context: *anyopaque, n: usize, alignment: std.mem.Alignment, return_address: usize) ?[*]u8 { - const this: *MemoryReportingAllocator = @alignCast(@ptrCast(context)); - const result = this.child_allocator.rawAlloc(n, alignment, return_address) orelse return null; - _ = this.memory_cost.fetchAdd(n, .monotonic); - if (comptime Environment.allow_assert) - log("malloc({d}) = {d}", .{ n, this.memory_cost.raw }); - return result; - } - - pub fn discard(this: *MemoryReportingAllocator, buf: []const u8) void { - _ = this.memory_cost.fetchSub(buf.len, .monotonic); - if (comptime Environment.allow_assert) - log("discard({d}) = {d}", .{ buf.len, this.memory_cost.raw }); - } - - fn resize(context: *anyopaque, buf: []u8, alignment: std.mem.Alignment, new_len: usize, ret_addr: usize) bool { - const this: *MemoryReportingAllocator = @alignCast(@ptrCast(context)); - if (this.child_allocator.rawResize(buf, alignment, new_len, ret_addr)) { - _ = this.memory_cost.fetchAdd(new_len -| buf.len, .monotonic); - if (comptime Environment.allow_assert) - log("resize() = {d}", .{this.memory_cost.raw}); - return true; - } else { - return false; - } - } - - fn free(context: *anyopaque, buf: []u8, alignment: std.mem.Alignment, ret_addr: usize) void { - const this: 
*MemoryReportingAllocator = @alignCast(@ptrCast(context)); - this.child_allocator.rawFree(buf, alignment, ret_addr); - - if (comptime Environment.allow_assert) { - // check for overflow, racily - const prev = this.memory_cost.fetchSub(buf.len, .monotonic); - _ = prev; - // bun.assert(prev > this.memory_cost.load(.monotonic)); - - log("free({d}) = {d}", .{ buf.len, this.memory_cost.raw }); - } - } - - pub fn wrap(this: *MemoryReportingAllocator, allocator_: std.mem.Allocator) std.mem.Allocator { - this.* = .{ - .child_allocator = allocator_, - }; - - return this.allocator(); - } - - pub fn allocator(this: *MemoryReportingAllocator) std.mem.Allocator { - return std.mem.Allocator{ - .ptr = this, - .vtable = &MemoryReportingAllocator.VTable, - }; - } - - pub fn report(this: *MemoryReportingAllocator, vm: *JSC.VM) void { - const mem = this.memory_cost.load(.monotonic); - if (mem > 0) { - vm.reportExtraMemory(mem); - if (comptime Environment.allow_assert) - log("report({d})", .{mem}); - } - } - - pub inline fn assert(this: *const MemoryReportingAllocator) void { - if (comptime !Environment.allow_assert) { - return; - } - - const memory_cost = this.memory_cost.load(.monotonic); - if (memory_cost > 0) { - Output.panic("MemoryReportingAllocator still has {d} bytes allocated", .{memory_cost}); - } - } - - pub const VTable = std.mem.Allocator.VTable{ - .alloc = &MemoryReportingAllocator.alloc, - .resize = &MemoryReportingAllocator.resize, - .remap = &std.mem.Allocator.noRemap, - .free = &MemoryReportingAllocator.free, - }; -}; - -/// According to https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date, -/// maximum Date in JavaScript is less than Number.MAX_SAFE_INTEGER (u52). -pub const init_timestamp = std.math.maxInt(JSC.JSTimeType); -pub const JSTimeType = u52; - -pub fn toJSTime(sec: isize, nsec: isize) JSTimeType { - const millisec = @as(u64, @intCast(@divTrunc(nsec, std.time.ns_per_ms))); - return @as(JSTimeType, @truncate(@as(u64, @intCast(sec * std.time.ms_per_s)) + millisec)); -} diff --git a/src/bun.js/bindings/CallFrame.zig b/src/bun.js/bindings/CallFrame.zig index 8d105dde18..2950f8e4fb 100644 --- a/src/bun.js/bindings/CallFrame.zig +++ b/src/bun.js/bindings/CallFrame.zig @@ -43,6 +43,11 @@ pub const CallFrame = opaque { return self.asUnsafeJSValueArray()[offset_callee]; } + /// Return a basic iterator. + pub fn iterate(call_frame: *const CallFrame) Iterator { + return .{ .rest = call_frame.arguments() }; + } + /// From JavaScriptCore/interpreter/CallFrame.h /// /// | ...... | | @@ -194,4 +199,106 @@ pub const CallFrame = opaque { pub fn describeFrame(self: *const CallFrame) [:0]const u8 { return std.mem.span(Bun__CallFrame__describeFrame(self)); } + + pub const Iterator = struct { + rest: []const JSValue, + pub fn next(it: *Iterator) ?JSValue { + if (it.rest.len == 0) return null; + const current = it.rest[0]; + it.rest = it.rest[1..]; + return current; + } + }; + + /// This is an advanced iterator struct which is used by various APIs. In + /// Node.fs, `will_be_async` is set to true which allows string/path APIs to + /// know if they have to do threadsafe clones. + /// + /// Prefer `Iterator` for a simpler iterator. 
+ pub const ArgumentsSlice = struct { + remaining: []const JSC.JSValue, + vm: *JSC.VirtualMachine, + arena: bun.ArenaAllocator = bun.ArenaAllocator.init(bun.default_allocator), + all: []const JSC.JSValue, + threw: bool = false, + protected: bun.bit_set.IntegerBitSet(32) = bun.bit_set.IntegerBitSet(32).initEmpty(), + will_be_async: bool = false, + + pub fn unprotect(slice: *ArgumentsSlice) void { + var iter = slice.protected.iterator(.{}); + const ctx = slice.vm.global; + while (iter.next()) |i| { + JSC.C.JSValueUnprotect(ctx, slice.all[i].asObjectRef()); + } + slice.protected = bun.bit_set.IntegerBitSet(32).initEmpty(); + } + + pub fn deinit(slice: *ArgumentsSlice) void { + slice.unprotect(); + slice.arena.deinit(); + } + + pub fn protectEat(slice: *ArgumentsSlice) void { + if (slice.remaining.len == 0) return; + const index = slice.all.len - slice.remaining.len; + slice.protected.set(index); + JSC.C.JSValueProtect(slice.vm.global, slice.all[index].asObjectRef()); + slice.eat(); + } + + pub fn protectEatNext(slice: *ArgumentsSlice) ?JSC.JSValue { + if (slice.remaining.len == 0) return null; + return slice.nextEat(); + } + + pub fn from(vm: *JSC.VirtualMachine, slice: []const JSC.JSValueRef) ArgumentsSlice { + return init(vm, @as([*]const JSC.JSValue, @ptrCast(slice.ptr))[0..slice.len]); + } + pub fn init(vm: *JSC.VirtualMachine, slice: []const JSC.JSValue) ArgumentsSlice { + return ArgumentsSlice{ + .remaining = slice, + .vm = vm, + .all = slice, + .arena = bun.ArenaAllocator.init(vm.allocator), + }; + } + + pub fn initAsync(vm: *JSC.VirtualMachine, slice: []const JSC.JSValue) ArgumentsSlice { + return ArgumentsSlice{ + .remaining = bun.default_allocator.dupe(JSC.JSValue, slice), + .vm = vm, + .all = slice, + .arena = bun.ArenaAllocator.init(bun.default_allocator), + }; + } + + pub inline fn len(slice: *const ArgumentsSlice) u16 { + return @as(u16, @truncate(slice.remaining.len)); + } + + pub fn eat(slice: *ArgumentsSlice) void { + if (slice.remaining.len == 0) { + return; + } + + slice.remaining = slice.remaining[1..]; + } + + /// Peek the next argument without eating it + pub fn next(slice: *ArgumentsSlice) ?JSC.JSValue { + if (slice.remaining.len == 0) { + return null; + } + + return slice.remaining[0]; + } + + pub fn nextEat(slice: *ArgumentsSlice) ?JSC.JSValue { + if (slice.remaining.len == 0) { + return null; + } + defer slice.eat(); + return slice.remaining[0]; + } + }; }; diff --git a/src/bun.js/bindings/Debugger.zig b/src/bun.js/bindings/Debugger.zig deleted file mode 100644 index 47ea34bb16..0000000000 --- a/src/bun.js/bindings/Debugger.zig +++ /dev/null @@ -1,33 +0,0 @@ -const bun = @import("bun"); -const JSC = bun.JSC; - -pub const Debugger = struct { - pub const AsyncCallType = enum(u8) { - DOMTimer = 1, - EventListener = 2, - PostMessage = 3, - RequestAnimationFrame = 4, - Microtask = 5, - }; - extern fn Debugger__didScheduleAsyncCall(*JSC.JSGlobalObject, AsyncCallType, u64, bool) void; - extern fn Debugger__didCancelAsyncCall(*JSC.JSGlobalObject, AsyncCallType, u64) void; - extern fn Debugger__didDispatchAsyncCall(*JSC.JSGlobalObject, AsyncCallType, u64) void; - extern fn Debugger__willDispatchAsyncCall(*JSC.JSGlobalObject, AsyncCallType, u64) void; - - pub fn didScheduleAsyncCall(globalObject: *JSC.JSGlobalObject, call: AsyncCallType, id: u64, single_shot: bool) void { - JSC.markBinding(@src()); - Debugger__didScheduleAsyncCall(globalObject, call, id, single_shot); - } - pub fn didCancelAsyncCall(globalObject: *JSC.JSGlobalObject, call: AsyncCallType, id: u64) void { - 
JSC.markBinding(@src()); - Debugger__didCancelAsyncCall(globalObject, call, id); - } - pub fn didDispatchAsyncCall(globalObject: *JSC.JSGlobalObject, call: AsyncCallType, id: u64) void { - JSC.markBinding(@src()); - Debugger__didDispatchAsyncCall(globalObject, call, id); - } - pub fn willDispatchAsyncCall(globalObject: *JSC.JSGlobalObject, call: AsyncCallType, id: u64) void { - JSC.markBinding(@src()); - Debugger__willDispatchAsyncCall(globalObject, call, id); - } -}; diff --git a/src/bun.js/bindings/FFI.zig b/src/bun.js/bindings/FFI.zig index bac1c5f4a0..020cb4af92 100644 --- a/src/bun.js/bindings/FFI.zig +++ b/src/bun.js/bindings/FFI.zig @@ -1,7 +1,8 @@ // This is zig translate-c run on ffi.h // it turns out: FFI.h is faster than our implementation that calls into C++ bindings // so we just use this in some cases - +const bun = @import("bun"); +const jsc = bun.jsc; pub const @"bool" = bool; pub const JSCell = ?*anyopaque; const struct_unnamed_1 = extern struct { @@ -14,7 +15,7 @@ pub const union_EncodedJSValue = extern union { asBits: struct_unnamed_1, asPtr: ?*anyopaque, asDouble: f64, - asJSValue: @import("./bindings.zig").JSValue, + asJSValue: jsc.JSValue, }; pub const EncodedJSValue = union_EncodedJSValue; pub export var ValueUndefined: EncodedJSValue = EncodedJSValue{ @@ -59,8 +60,8 @@ pub inline fn JSVALUE_TO_INT64(arg_value: EncodedJSValue) i64 { } pub extern fn JSVALUE_TO_UINT64_SLOW(value: EncodedJSValue) u64; pub extern fn JSVALUE_TO_INT64_SLOW(value: EncodedJSValue) i64; -pub const UINT64_TO_JSVALUE_SLOW = @import("./bindings.zig").JSValue.fromUInt64NoTruncate; -pub const INT64_TO_JSVALUE_SLOW = @import("./bindings.zig").JSValue.fromInt64NoTruncate; +pub const UINT64_TO_JSVALUE_SLOW = jsc.JSValue.fromUInt64NoTruncate; +pub const INT64_TO_JSVALUE_SLOW = jsc.JSValue.fromInt64NoTruncate; pub inline fn UINT64_TO_JSVALUE(arg_globalObject: ?*anyopaque, arg_val: u64) EncodedJSValue { const globalObject = arg_globalObject; const val = arg_val; @@ -70,7 +71,7 @@ pub inline fn UINT64_TO_JSVALUE(arg_globalObject: ?*anyopaque, arg_val: u64) Enc if (val < @as(c_ulonglong, @bitCast(@as(c_longlong, @as(c_long, 9007199254740991))))) { return DOUBLE_TO_JSVALUE(@as(f64, @floatFromInt(val))); } - return UINT64_TO_JSVALUE_SLOW(@as(*@import("./bindings.zig").JSGlobalObject, @ptrCast(globalObject.?)), val).asEncoded(); + return UINT64_TO_JSVALUE_SLOW(@as(*jsc.JSGlobalObject, @ptrCast(globalObject.?)), val).asEncoded(); } pub inline fn INT64_TO_JSVALUE(arg_globalObject: ?*anyopaque, arg_val: i64) EncodedJSValue { const globalObject = arg_globalObject; @@ -81,7 +82,7 @@ pub inline fn INT64_TO_JSVALUE(arg_globalObject: ?*anyopaque, arg_val: i64) Enco if ((val >= @as(c_longlong, @bitCast(@as(c_longlong, -@as(c_long, 9007199254740991))))) and (val <= @as(c_longlong, @bitCast(@as(c_longlong, @as(c_long, 9007199254740991)))))) { return DOUBLE_TO_JSVALUE(@as(f64, @floatFromInt(val))); } - return INT64_TO_JSVALUE_SLOW(@as(*@import("./bindings.zig").JSGlobalObject, @ptrCast(globalObject.?)), val).asEncoded(); + return INT64_TO_JSVALUE_SLOW(@as(*jsc.JSGlobalObject, @ptrCast(globalObject.?)), val).asEncoded(); } pub inline fn INT32_TO_JSVALUE(arg_val: i32) EncodedJSValue { return .{ .asInt64 = @as(i64, @bitCast(@as(c_ulonglong, 18446181123756130304) | @as(c_ulonglong, @bitCast(@as(c_ulonglong, @as(u32, @bitCast(arg_val))))))) }; diff --git a/src/bun.js/bindings/JSFunction.zig b/src/bun.js/bindings/JSFunction.zig index 151baa7677..41b8a852f2 100644 --- a/src/bun.js/bindings/JSFunction.zig +++ 
b/src/bun.js/bindings/JSFunction.zig @@ -3,7 +3,7 @@ const bun = @import("bun"); const string = bun.string; const Output = bun.Output; const JSC = bun.JSC; -const JSHostFunctionType = JSC.JSHostFunctionType; +const JSHostFn = JSC.JSHostFn; const ZigString = JSC.ZigString; const String = bun.String; const JSGlobalObject = JSC.JSGlobalObject; @@ -25,23 +25,23 @@ pub const JSFunction = opaque { const CreateJSFunctionOptions = struct { implementation_visibility: ImplementationVisibility = .public, intrinsic: Intrinsic = .none, - constructor: ?*const JSHostFunctionType = null, + constructor: ?*const JSHostFn = null, }; extern fn JSFunction__createFromZig( global: *JSGlobalObject, fn_name: bun.String, - implementation: *const JSHostFunctionType, + implementation: *const JSHostFn, arg_count: u32, implementation_visibility: ImplementationVisibility, intrinsic: Intrinsic, - constructor: ?*const JSHostFunctionType, + constructor: ?*const JSHostFn, ) JSValue; pub fn create( global: *JSGlobalObject, fn_name: anytype, - comptime implementation: JSC.JSHostZigFunction, + comptime implementation: JSC.JSHostFnZig, function_length: u32, options: CreateJSFunctionOptions, ) JSValue { @@ -51,7 +51,7 @@ pub const JSFunction = opaque { bun.String => fn_name, else => bun.String.init(fn_name), }, - JSC.toJSHostFunction(implementation), + JSC.toJSHostFn(implementation), function_length, options.implementation_visibility, options.intrinsic, diff --git a/src/bun.js/bindings/JSGlobalObject.zig b/src/bun.js/bindings/JSGlobalObject.zig index e293eee25b..b7e4dd0b28 100644 --- a/src/bun.js/bindings/JSGlobalObject.zig +++ b/src/bun.js/bindings/JSGlobalObject.zig @@ -40,7 +40,7 @@ pub const JSGlobalObject = opaque { } pub fn throwInvalidArguments(this: *JSGlobalObject, comptime fmt: [:0]const u8, args: anytype) bun.JSError { - const err = JSC.toInvalidArguments(fmt, args, this); + const err = this.toInvalidArguments(fmt, args); return this.throwValue(err); } @@ -64,8 +64,8 @@ pub const JSGlobalObject = opaque { return this.ERR(.INVALID_ARG_TYPE, comptime std.fmt.comptimePrint("Expected {s} to be a {s} for '{s}'.", .{ field, typename, name_ }), .{}).toJS(); } - pub fn toJS(this: *JSC.JSGlobalObject, value: anytype, comptime lifetime: JSC.Lifetime) JSC.JSValue { - return JSC.toJS(this, @TypeOf(value), value, lifetime); + pub fn toJS(this: *JSC.JSGlobalObject, value: anytype, comptime lifetime: JSC.JSValue.FromAnyLifetime) JSC.JSValue { + return .fromAny(this, @TypeOf(value), value, lifetime); } /// "Expected {field} to be a {typename} for '{name}'." @@ -201,7 +201,7 @@ pub const JSGlobalObject = opaque { comptime expected: usize, got: usize, ) JSC.JSValue { - return JSC.toTypeError(.MISSING_ARGS, "Not enough arguments to '" ++ name_ ++ "'. Expected {d}, got {d}.", .{ expected, got }, this); + return this.toTypeError(.MISSING_ARGS, "Not enough arguments to '" ++ name_ ++ "'. Expected {d}, got {d}.", .{ expected, got }); } /// Not enough arguments passed to function named `name_` @@ -718,7 +718,7 @@ pub const JSGlobalObject = opaque { // when querying from JavaScript, 'func.len' comptime argument_count: u32, ) JSValue { - return JSC.NewRuntimeFunction(global, ZigString.static(display_name), argument_count, JSC.toJSHostFunction(function), false, false, null); + return JSC.host_fn.NewRuntimeFunction(global, ZigString.static(display_name), argument_count, JSC.toJSHostFn(function), false, false, null); } /// Get a lazily-initialized `JSC::String` from `BunCommonStrings.h`. 
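// [editor's note] A minimal usage sketch, not part of the patch. It ties together the renames shown in the hunks above: `JSC.toJSHostFn` / `JSC.host_fn.NewRuntimeFunction` (formerly `toJSHostFunction` / `NewRuntimeFunction`), the `CallFrame.iterate()` helper, and `JSGlobalObject.toJS`, which now forwards to `JSValue.fromAny` with a `FromAnyLifetime`. The function `addOne` and the `toInt32` conversion helper are hypothetical placeholders.
//
// const bun = @import("bun");
// const JSC = bun.JSC;
//
// fn addOne(global: *JSC.JSGlobalObject, call_frame: *JSC.CallFrame) bun.JSError!JSC.JSValue {
//     var it = call_frame.iterate(); // basic Iterator added in CallFrame.zig above
//     const arg = it.next() orelse .undefined;
//     const n = arg.toInt32(); // hypothetical conversion helper
//     return global.toJS(n + 1, .temporary); // forwards to JSValue.fromAny
// }
//
// // Exposed to JavaScript through the renamed helper:
// // const js_fn = JSC.host_fn.NewRuntimeFunction(global, JSC.ZigString.static("addOne"), 1, JSC.toJSHostFn(addOne), false, false, null);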
@@ -791,6 +791,50 @@ pub const JSGlobalObject = opaque { @panic("A C++ exception occurred"); } + pub fn createError( + globalThis: *JSC.JSGlobalObject, + comptime fmt: string, + args: anytype, + ) JSC.JSValue { + if (comptime std.meta.fields(@TypeOf(args)).len == 0) { + var zig_str = JSC.ZigString.init(fmt); + if (comptime !strings.isAllASCII(fmt)) { + zig_str.markUTF16(); + } + + return zig_str.toErrorInstance(globalThis); + } else { + var fallback = std.heap.stackFallback(256, bun.default_allocator); + var alloc = fallback.get(); + + const buf = std.fmt.allocPrint(alloc, fmt, args) catch unreachable; + var zig_str = JSC.ZigString.init(buf); + zig_str.detectEncoding(); + // it always clones + const res = zig_str.toErrorInstance(globalThis); + alloc.free(buf); + return res; + } + } + + pub fn toTypeError( + global: *JSC.JSGlobalObject, + code: JSC.Error, + comptime fmt: [:0]const u8, + args: anytype, + ) JSC.JSValue { + return code.fmt(global, fmt, args); + } + + pub fn toInvalidArguments( + global: *JSC.JSGlobalObject, + comptime fmt: [:0]const u8, + args: anytype, + ) JSC.JSValue { + @branchHint(.cold); + return JSC.Error.INVALID_ARG_TYPE.fmt(global, fmt, args); + } + pub const Extern = [_][]const u8{ "create", "getModuleRegistryMap", "resetModuleRegistryMap" }; comptime { diff --git a/src/bun.js/bindings/JSObject.zig b/src/bun.js/bindings/JSObject.zig index deb9fc69f7..62688560fa 100644 --- a/src/bun.js/bindings/JSObject.zig +++ b/src/bun.js/bindings/JSObject.zig @@ -6,7 +6,13 @@ const JSGlobalObject = JSC.JSGlobalObject; const ZigString = JSC.ZigString; const JSError = bun.JSError; +extern const JSC__JSObject__maxInlineCapacity: c_uint; + pub const JSObject = opaque { + pub inline fn maxInlineCapacity() c_uint { + return JSC__JSObject__maxInlineCapacity; + } + extern fn JSC__JSObject__getIndex(this: JSValue, globalThis: *JSGlobalObject, i: u32) JSValue; extern fn JSC__JSObject__putRecord(this: *JSObject, global: *JSGlobalObject, key: *ZigString, values: [*]ZigString, len: usize) void; extern fn Bun__JSObject__getCodePropertyVMInquiry(global: *JSGlobalObject, obj: *JSObject) JSValue; diff --git a/src/bun.js/bindings/JSValue.zig b/src/bun.js/bindings/JSValue.zig index 89aaed4ad3..18314dd62c 100644 --- a/src/bun.js/bindings/JSValue.zig +++ b/src/bun.js/bindings/JSValue.zig @@ -23,7 +23,7 @@ pub const JSValue = enum(i64) { /// When JavaScriptCore throws something, it returns a null cell (0). The /// exception is set on the global object. ABI-compatible with EncodedJSValue.
- pub const MaybeException = enum(JSValueReprInt) { + pub const MaybeException = enum(backing_int) { zero = 0, _, @@ -640,14 +640,16 @@ pub const JSValue = enum(i64) { ).unwrap(); } - pub fn callNextTick(function: JSValue, global: *JSGlobalObject, args: anytype) void { + extern fn Bun__Process__queueNextTick1(*JSGlobalObject, func: JSValue, JSValue) void; + extern fn Bun__Process__queueNextTick2(*JSGlobalObject, func: JSValue, JSValue, JSValue) void; + + pub inline fn callNextTick(function: JSValue, global: *JSGlobalObject, args: anytype) void { if (Environment.isDebug) { bun.assert(function.isCallable()); } - const num_args = @typeInfo(@TypeOf(args)).array.len; - switch (num_args) { - 1 => JSC.Bun__Process__queueNextTick1(@ptrCast(global), function, args[0]), - 2 => JSC.Bun__Process__queueNextTick2(@ptrCast(global), function, args[0], args[1]), + switch (comptime bun.len(@as(@TypeOf(args), undefined))) { + 1 => Bun__Process__queueNextTick1(@ptrCast(global), function, args[0]), + 2 => Bun__Process__queueNextTick2(@ptrCast(global), function, args[0], args[1]), else => @compileError("needs more copy paste"), } } @@ -940,7 +942,7 @@ pub const JSValue = enum(i64) { JSC.markBinding(@src()); @setRuntimeSafety(false); if (allocator) |alloc| { - return JSBuffer__bufferFromPointerAndLengthAndDeinit(globalObject, slice.ptr, slice.len, alloc.ptr, JSC.MarkedArrayBuffer_deallocator); + return JSBuffer__bufferFromPointerAndLengthAndDeinit(globalObject, slice.ptr, slice.len, alloc.ptr, JSC.array_buffer.MarkedArrayBuffer_deallocator); } else { return JSBuffer__bufferFromPointerAndLengthAndDeinit(globalObject, slice.ptr, slice.len, null, null); } @@ -1780,16 +1782,16 @@ pub const JSValue = enum(i64) { return JSC__JSValue__symbolKeyFor(this, global, str); } - extern fn JSC__JSValue___then(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: JSC.JSHostFunctionPtr, reject: JSC.JSHostFunctionPtr) void; - pub fn _then(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: JSNativeFn, reject: JSNativeFn) void { + extern fn JSC__JSValue___then(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: *const JSC.JSHostFn, reject: *const JSC.JSHostFn) void; + pub fn _then(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: JSC.JSHostFnZig, reject: JSC.JSHostFnZig) void { return JSC__JSValue___then(this, global, ctx, toJSHostFunction(resolve), toJSHostFunction(reject)); } - pub fn _then2(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: JSC.JSHostFunctionPtr, reject: JSC.JSHostFunctionPtr) void { + pub fn _then2(this: JSValue, global: *JSGlobalObject, ctx: JSValue, resolve: *const JSC.JSHostFn, reject: *const JSC.JSHostFn) void { return JSC__JSValue___then(this, global, ctx, resolve, reject); } - pub fn then(this: JSValue, global: *JSGlobalObject, ctx: ?*anyopaque, resolve: JSNativeFn, reject: JSNativeFn) void { + pub fn then(this: JSValue, global: *JSGlobalObject, ctx: ?*anyopaque, resolve: JSC.JSHostFnZig, reject: JSC.JSHostFnZig) void { if (comptime bun.Environment.allow_assert) bun.assert(JSValue.fromPtr(ctx).asPtr(anyopaque) == ctx.?); return this._then(global, JSValue.fromPtr(ctx), resolve, reject); @@ -2303,15 +2305,15 @@ pub const JSValue = enum(i64) { pub fn toFmt( this: JSValue, - formatter: *Exports.ConsoleObject.Formatter, - ) Exports.ConsoleObject.Formatter.ZigFormatter { + formatter: *JSC.ConsoleObject.Formatter, + ) JSC.ConsoleObject.Formatter.ZigFormatter { formatter.remaining_values = &[_]JSValue{}; if (formatter.map_node != null) { formatter.deinit(); 
} formatter.stack_check.update(); - return Exports.ConsoleObject.Formatter.ZigFormatter{ + return JSC.ConsoleObject.Formatter.ZigFormatter{ .formatter = formatter, .value = this, }; @@ -2530,12 +2532,12 @@ pub const JSValue = enum(i64) { // TODO: remove this (no replacement) pub inline fn c(this: C_API.JSValueRef) JSValue { - return @as(JSValue, @enumFromInt(@as(JSValueReprInt, @bitCast(@intFromPtr(this))))); + return @as(JSValue, @enumFromInt(@as(backing_int, @bitCast(@intFromPtr(this))))); } // TODO: remove this (no replacement) pub inline fn fromRef(this: C_API.JSValueRef) JSValue { - return @as(JSValue, @enumFromInt(@as(JSValueReprInt, @bitCast(@intFromPtr(this))))); + return @as(JSValue, @enumFromInt(@as(backing_int, @bitCast(@intFromPtr(this))))); } // TODO: remove this (no replacement) @@ -2641,15 +2643,137 @@ pub const JSValue = enum(i64) { return out; } + pub const FromAnyLifetime = enum { allocated, temporary }; + + /// Marshall a zig value into a JSValue using comptime reflection. + /// + /// - Primitives are converted to their JS equivalent. + /// - Types with `toJS` or `toJSNewlyCreated` methods have them called + /// - Slices are converted to JS arrays + /// - Enums are converted to 32-bit numbers. + /// + /// `lifetime` describes the lifetime of `value`. If it must be copied, specify `temporary`. + pub fn fromAny( + globalObject: *JSC.JSGlobalObject, + comptime T: type, + value: T, + comptime lifetime: FromAnyLifetime, + ) JSC.JSValue { + const Type = comptime brk: { + var CurrentType = T; + if (@typeInfo(T) == .optional) { + CurrentType = @typeInfo(T).optional.child; + } + break :brk if (@typeInfo(CurrentType) == .pointer and @typeInfo(CurrentType).pointer.size == .one) + @typeInfo(CurrentType).pointer.child + else + CurrentType; + }; + + if (comptime bun.trait.isNumber(Type)) { + return JSC.JSValue.jsNumberWithType(Type, if (comptime Type != T) value.* else value); + } + + switch (comptime Type) { + void => return .undefined, + bool => return JSC.JSValue.jsBoolean(if (comptime Type != T) value.* else value), + *JSC.JSGlobalObject => return value.toJSValue(), + []const u8, [:0]const u8, [*:0]const u8, []u8, [:0]u8, [*:0]u8 => { + return bun.String.createUTF8ForJS(globalObject, value); + }, + []const bun.String => { + defer { + for (value) |out| { + out.deref(); + } + bun.default_allocator.free(value); + } + return bun.String.toJSArray(globalObject, value); + }, + JSC.JSValue => return if (Type != T) value.* else value, + + inline []const u16, []const u32, []const i16, []const i8, []const i32, []const f32 => { + var array = JSC.JSValue.createEmptyArray(globalObject, value.len); + for (value, 0..) |item, i| { + array.putIndex( + globalObject, + @truncate(i), + JSC.jsNumber(item), + ); + } + return array; + }, + + else => { + + // Recursion can stack overflow here + if (bun.trait.isSlice(Type)) { + const Child = comptime std.meta.Child(Type); + + var array = JSC.JSValue.createEmptyArray(globalObject, value.len); + for (value, 0..) 
|*item, i| { + const res = fromAny(globalObject, *Child, item, lifetime); + if (res == .zero) return .zero; + array.putIndex( + globalObject, + @truncate(i), + res, + ); + } + return array; + } + + if (comptime @hasDecl(Type, "toJSNewlyCreated") and @typeInfo(@TypeOf(@field(Type, "toJSNewlyCreated"))).@"fn".params.len == 2) { + return value.toJSNewlyCreated(globalObject); + } + + if (comptime @hasDecl(Type, "toJS") and @typeInfo(@TypeOf(@field(Type, "toJS"))).@"fn".params.len == 2) { + return value.toJS(globalObject); + } + + // must come after toJS check in case this enum implements its own serializer. + if (@typeInfo(Type) == .@"enum") { + // FIXME: creates non-normalized integers (e.g. u2), which + // aren't handled by `jsNumberWithType` rn + return JSC.JSValue.jsNumberWithType(u32, @as(u32, @intFromEnum(value))); + } + + @compileError("don't know how to convert " ++ @typeName(T) ++ " to JS"); + }, + } + } + + /// Print a JSValue to stderr; this is only meant for debugging purposes + pub fn dump(value: JSC.WebCore.JSValue, globalObject: *JSC.JSGlobalObject) !void { + var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalObject }; + defer formatter.deinit(); + try Output.errorWriter().print("{}\n", .{value.toFmt(globalObject, &formatter)}); + Output.flush(); + } + + pub const JSPropertyNameIterator = struct { + array: JSC.C.JSPropertyNameArrayRef, + count: u32, + i: u32 = 0, + + pub fn next(this: *JSPropertyNameIterator) ?JSC.C.JSStringRef { + if (this.i >= this.count) return null; + const i = this.i; + this.i += 1; + + return JSC.C.JSPropertyNameArrayGetNameAtIndex(this.array, i); + } + }; + pub const exposed_to_ffi = struct { pub const JSVALUE_TO_INT64 = JSValue.JSC__JSValue__toInt64; pub const JSVALUE_TO_UINT64 = JSValue.JSC__JSValue__toUInt64NoTruncate; pub const INT64_TO_JSVALUE = JSValue.JSC__JSValue__fromInt64NoTruncate; pub const UINT64_TO_JSVALUE = JSValue.JSC__JSValue__fromUInt64NoTruncate; }; -}; -pub const JSValueReprInt = JSC.JSValueReprInt; + pub const backing_int = @typeInfo(JSValue).@"enum".tag_type; +}; const std = @import("std"); const bun = @import("bun"); @@ -2676,8 +2800,6 @@ const JSMap = JSC.JSMap; const JSArrayIterator = JSC.JSArrayIterator; const JSFunction = JSC.JSFunction; const JSCell = JSC.JSCell; -const Exports = @import("./exports.zig"); -const JSNativeFn = JSC.JSNativeFn; const AnyPromise = JSC.AnyPromise; const DOMURL = JSC.DOMURL; @@ -2685,9 +2807,9 @@ const JestPrettyFormat = @import("../test/pretty_format.zig").JestPrettyFormat; const JSInternalPromise = JSC.JSInternalPromise; const ZigException = JSC.ZigException; const ArrayBuffer = JSC.ArrayBuffer; -const toJSHostFunction = JSC.toJSHostFunction; -const JSHostFunctionType = JSC.JSHostFunctionType; +const toJSHostFunction = JSC.toJSHostFn; +const JSHostFunctionType = JSC.JSHostFn; extern "c" fn AsyncContextFrame__withAsyncContextIfNeeded(global: *JSGlobalObject, callback: JSValue) JSValue; extern "c" fn Bun__JSValue__isAsyncContextFrame(value: JSValue) bool; -const FetchHeaders = JSC.FetchHeaders; +const FetchHeaders = bun.webcore.FetchHeaders; const Environment = bun.Environment; diff --git a/src/bun.js/bindings/Process.zig b/src/bun.js/bindings/Process.zig deleted file mode 100644 index df7c837621..0000000000 --- a/src/bun.js/bindings/Process.zig +++ /dev/null @@ -1,46 +0,0 @@ -/// Process information and control APIs -pub const Process = opaque { - var title_mutex = bun.Mutex{}; - - pub fn getTitle(_: *JSGlobalObject, title: *ZigString) callconv(.C) void { - title_mutex.lock(); - defer
title_mutex.unlock(); - const str = bun.CLI.Bun__Node__ProcessTitle; - title.* = ZigString.init(str orelse "bun"); - } - - // TODO: https://github.com/nodejs/node/blob/master/deps/uv/src/unix/darwin-proctitle.c - pub fn setTitle(globalObject: *JSGlobalObject, newvalue: *ZigString) callconv(.C) JSValue { - title_mutex.lock(); - defer title_mutex.unlock(); - if (bun.CLI.Bun__Node__ProcessTitle) |_| bun.default_allocator.free(bun.CLI.Bun__Node__ProcessTitle.?); - bun.CLI.Bun__Node__ProcessTitle = newvalue.dupe(bun.default_allocator) catch bun.outOfMemory(); - return newvalue.toJS(globalObject); - } - - pub const getArgv = JSC.Node.Process.getArgv; - pub const getCwd = JSC.Node.Process.getCwd; - pub const setCwd = JSC.Node.Process.setCwd; - pub const exit = JSC.Node.Process.exit; - pub const getArgv0 = JSC.Node.Process.getArgv0; - pub const getExecPath = JSC.Node.Process.getExecPath; - pub const getExecArgv = JSC.Node.Process.getExecArgv; - - comptime { - @export(&getTitle, .{ .name = "Bun__Process__getTitle" }); - @export(&setTitle, .{ .name = "Bun__Process__setTitle" }); - @export(&getArgv, .{ .name = "Bun__Process__getArgv" }); - @export(&getCwd, .{ .name = "Bun__Process__getCwd" }); - @export(&setCwd, .{ .name = "Bun__Process__setCwd" }); - @export(&exit, .{ .name = "Bun__Process__exit" }); - @export(&getArgv0, .{ .name = "Bun__Process__getArgv0" }); - @export(&getExecPath, .{ .name = "Bun__Process__getExecPath" }); - @export(&getExecArgv, .{ .name = "Bun__Process__getExecArgv" }); - } -}; - -const bun = @import("bun"); -const JSC = bun.JSC; -const JSGlobalObject = JSC.JSGlobalObject; -const JSValue = JSC.JSValue; -const ZigString = JSC.ZigString; diff --git a/src/bun.js/bindings/SystemError.zig b/src/bun.js/bindings/SystemError.zig index e8cdfd4560..dd6e985f88 100644 --- a/src/bun.js/bindings/SystemError.zig +++ b/src/bun.js/bindings/SystemError.zig @@ -28,7 +28,7 @@ pub const SystemError = extern struct { extern fn SystemError__toErrorInstance(this: *const SystemError, global: *JSGlobalObject) JSValue; extern fn SystemError__toErrorInstanceWithInfoObject(this: *const SystemError, global: *JSC.JSGlobalObject) JSValue; - pub fn getErrno(this: *const SystemError) bun.C.E { + pub fn getErrno(this: *const SystemError) bun.sys.E { // The inverse in bun.sys.Error.toSystemError() return @enumFromInt(this.errno * -1); } diff --git a/src/bun.js/bindings/VM.zig b/src/bun.js/bindings/VM.zig index 3e4e191bef..cb65b5a87a 100644 --- a/src/bun.js/bindings/VM.zig +++ b/src/bun.js/bindings/VM.zig @@ -30,8 +30,8 @@ pub const VM = opaque { return JSC__VM__isJITEnabled(); } - /// deprecated in favor of getAPILock to avoid an annoying callback wrapper extern fn JSC__VM__holdAPILock(this: *VM, ctx: ?*anyopaque, callback: *const fn (ctx: ?*anyopaque) callconv(.C) void) void; + /// deprecated in favor of getAPILock to avoid an annoying callback wrapper pub fn holdAPILock(this: *VM, ctx: ?*anyopaque, callback: *const fn (ctx: ?*anyopaque) callconv(.C) void) void { JSC__VM__holdAPILock(this, ctx, callback); } diff --git a/src/bun.js/bindings/ZigString.zig b/src/bun.js/bindings/ZigString.zig index de26ea933a..533df6aeb1 100644 --- a/src/bun.js/bindings/ZigString.zig +++ b/src/bun.js/bindings/ZigString.zig @@ -48,7 +48,7 @@ pub const ZigString = extern struct { pub fn encodeWithAllocator(this: ZigString, allocator: std.mem.Allocator, encoding: JSC.Node.Encoding) []u8 { return switch (this.as()) { inline else => |repr| switch (encoding) { - inline else => |enc| 
JSC.WebCore.Encoder.constructFrom(std.meta.Child(@TypeOf(repr)), repr, allocator, enc), + inline else => |enc| JSC.WebCore.encoding.constructFrom(std.meta.Child(@TypeOf(repr)), repr, allocator, enc), }, }; } @@ -235,7 +235,7 @@ pub const ZigString = extern struct { return this.len * 2; } - return JSC.WebCore.Encoder.byteLengthU8(this.slice().ptr, this.slice().len, .utf16le); + return JSC.WebCore.encoding.byteLengthU8(this.slice().ptr, this.slice().len, .utf16le); } pub fn latin1ByteLength(this: ZigString) usize { @@ -257,7 +257,7 @@ pub const ZigString = extern struct { return strings.elementLengthUTF16IntoUTF8([]const u16, this.utf16SliceAligned()); } - return JSC.WebCore.Encoder.byteLengthU8(this.slice().ptr, this.slice().len, .utf8); + return bun.webcore.encoding.byteLengthU8(this.slice().ptr, this.slice().len, .utf8); } pub fn toOwnedSlice(this: ZigString, allocator: std.mem.Allocator) OOM![]u8 { diff --git a/src/bun.js/bindings/bindings.zig b/src/bun.js/bindings/bindings.zig deleted file mode 100644 index 78e01f03d9..0000000000 --- a/src/bun.js/bindings/bindings.zig +++ /dev/null @@ -1,402 +0,0 @@ -const std = @import("std"); -const bun = @import("bun"); -const string = bun.string; -const Output = bun.Output; -const C_API = bun.JSC.C; -const StringPointer = @import("../../api/schema.zig").Api.StringPointer; -const Exports = @import("./exports.zig"); -const strings = bun.strings; -const ErrorableZigString = Exports.ErrorableZigString; -const ErrorableResolvedSource = Exports.ErrorableResolvedSource; -const ZigException = Exports.ZigException; -const ZigStackTrace = Exports.ZigStackTrace; -const ArrayBuffer = @import("../base.zig").ArrayBuffer; -const JSC = bun.JSC; - -const FFI = @import("./FFI.zig"); -const NullableAllocator = bun.NullableAllocator; -const MutableString = bun.MutableString; -const JestPrettyFormat = @import("../test/pretty_format.zig").JestPrettyFormat; -const String = bun.String; -const ErrorableString = JSC.ErrorableString; -const JSError = bun.JSError; -const OOM = bun.OOM; -const napi = @import("../../napi/napi.zig"); - -pub extern const JSC__JSObject__maxInlineCapacity: c_uint; - -pub const JSObject = @import("./JSObject.zig").JSObject; -pub const CachedBytecode = @import("./CachedBytecode.zig").CachedBytecode; -pub const DOMURL = @import("./DOMURL.zig").DOMURL; -pub const DOMFormData = @import("./DOMFormData.zig").DOMFormData; -pub const FetchHeaders = @import("./FetchHeaders.zig").FetchHeaders; -pub const ZigString = @import("./ZigString.zig").ZigString; -pub const SystemError = @import("./SystemError.zig").SystemError; -pub const JSUint8Array = @import("./JSUint8Array.zig").JSUint8Array; -pub const JSCell = @import("./JSCell.zig").JSCell; -pub const JSString = @import("./JSString.zig").JSString; -pub const GetterSetter = @import("./GetterSetter.zig").GetterSetter; -pub const CustomGetterSetter = @import("./CustomGetterSetter.zig").CustomGetterSetter; -pub const JSPromiseRejectionOperation = @import("./JSPromiseRejectionOperation.zig").JSPromiseRejectionOperation; -pub const CommonAbortReason = @import("./CommonAbortReason.zig").CommonAbortReason; -pub const SourceType = @import("./SourceType.zig").SourceType; -pub const AbortSignal = @import("./AbortSignal.zig").AbortSignal; -pub const JSPromise = @import("./JSPromise.zig").JSPromise; -pub const JSInternalPromise = @import("./JSInternalPromise.zig").JSInternalPromise; -pub const AnyPromise = @import("./AnyPromise.zig").AnyPromise; -pub const JSModuleLoader = @import("./JSModuleLoader.zig").JSModuleLoader; -pub 
const JSFunction = @import("./JSFunction.zig").JSFunction; -pub const JSGlobalObject = @import("./JSGlobalObject.zig").JSGlobalObject; -pub const CommonStrings = @import("./CommonStrings.zig").CommonStrings; -pub const JSArrayIterator = @import("./JSArrayIterator.zig").JSArrayIterator; -pub const JSMap = @import("./JSMap.zig").JSMap; -pub const JSValue = @import("./JSValue.zig").JSValue; -pub const VM = @import("./VM.zig").VM; -pub const CallFrame = @import("./CallFrame.zig").CallFrame; -pub const EncodedJSValue = @import("./EncodedJSValue.zig").EncodedJSValue; -pub const JSArray = @import("./JSArray.zig").JSArray; -pub const URL = @import("./URL.zig").URL; -pub const URLSearchParams = @import("./URLSearchParams.zig").URLSearchParams; -pub const WTF = @import("./WTF.zig").WTF; -pub const ScriptExecutionStatus = @import("./ScriptExecutionStatus.zig").ScriptExecutionStatus; -pub const DeferredError = @import("./DeferredError.zig").DeferredError; -pub const Sizes = @import("./sizes.zig"); -pub const JSRef = @import("./JSRef.zig").JSRef; -pub fn PromiseCallback(comptime Type: type, comptime CallbackFunction: fn (*Type, *JSGlobalObject, []const JSValue) anyerror!JSValue) type { - return struct { - pub fn callback( - ctx: ?*anyopaque, - globalThis: *JSGlobalObject, - arguments: [*]const JSValue, - arguments_len: usize, - ) callconv(.C) JSValue { - return CallbackFunction(@as(*Type, @ptrCast(@alignCast(ctx.?))), globalThis, arguments[0..arguments_len]) catch |err| brk: { - break :brk ZigString.init(bun.asByteSlice(@errorName(err))).toErrorInstance(globalThis); - }; - } - }.callback; -} - -pub const JSNativeFn = JSHostZigFunction; - -pub const JSValueReprInt = i64; - -pub const JSHostFunctionType = fn (*JSGlobalObject, *CallFrame) callconv(JSC.conv) JSValue; -pub const JSHostFunctionTypeWithCCallConvForAssertions = fn (*JSGlobalObject, *CallFrame) callconv(.C) JSValue; -pub const JSHostFunctionPtr = *const JSHostFunctionType; -pub const JSHostZigFunction = fn (*JSGlobalObject, *CallFrame) bun.JSError!JSValue; -pub fn JSHostZigFunctionWithContext(comptime ContextType: type) type { - return fn (*ContextType, *JSGlobalObject, *CallFrame) bun.JSError!JSValue; -} -pub fn JSHostFunctionTypeWithContext(comptime ContextType: type) type { - return fn (*ContextType, *JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue; -} - -pub fn toJSHostFunction(comptime Function: JSHostZigFunction) JSC.JSHostFunctionType { - return struct { - pub fn function(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { - if (bun.Environment.allow_assert and bun.Environment.is_canary) { - const value = Function(globalThis, callframe) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => globalThis.throwOutOfMemoryValue(), - }; - if (comptime bun.Environment.isDebug) { - if (value != .zero) { - if (globalThis.hasException()) { - var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalThis }; - defer formatter.deinit(); - bun.Output.err("Assertion failed", - \\Native function returned a non-zero JSValue while an exception is pending - \\ - \\ fn: {s} - \\ value: {} - \\ - , .{ - &Function, // use `(lldb) image lookup --address 0x1ec4` to discover what function failed - value.toFmt(&formatter), - }); - Output.flush(); - } - } - } - bun.assert((value == .zero) == globalThis.hasException()); - return value; - } - return @call(.always_inline, Function, .{ globalThis, callframe }) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory 
=> globalThis.throwOutOfMemoryValue(), - }; - } - }.function; -} -pub fn toJSHostFunctionWithContext(comptime ContextType: type, comptime Function: JSHostZigFunctionWithContext(ContextType)) JSHostFunctionTypeWithContext(ContextType) { - return struct { - pub fn function(ctx: *ContextType, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { - if (bun.Environment.allow_assert and bun.Environment.is_canary) { - const value = Function(ctx, globalThis, callframe) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => globalThis.throwOutOfMemoryValue(), - }; - if (comptime bun.Environment.isDebug) { - if (value != .zero) { - if (globalThis.hasException()) { - var formatter = JSC.ConsoleObject.Formatter{ .globalThis = globalThis }; - defer formatter.deinit(); - bun.Output.err("Assertion failed", - \\Native function returned a non-zero JSValue while an exception is pending - \\ - \\ fn: {s} - \\ value: {} - \\ - , .{ - &Function, // use `(lldb) image lookup --address 0x1ec4` to discover what function failed - value.toFmt(&formatter), - }); - Output.flush(); - } - } - } - bun.assert((value == .zero) == globalThis.hasException()); - return value; - } - return @call(.always_inline, Function, .{ ctx, globalThis, callframe }) catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => globalThis.throwOutOfMemoryValue(), - }; - } - }.function; -} - -pub fn toJSHostValue(globalThis: *JSGlobalObject, value: error{ OutOfMemory, JSError }!JSValue) JSValue { - if (bun.Environment.allow_assert and bun.Environment.is_canary) { - const normal = value catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => globalThis.throwOutOfMemoryValue(), - }; - bun.assert((normal == .zero) == globalThis.hasException()); - return normal; - } - return value catch |err| switch (err) { - error.JSError => .zero, - error.OutOfMemory => globalThis.throwOutOfMemoryValue(), - }; -} - -const ParsedHostFunctionErrorSet = struct { - OutOfMemory: bool = false, - JSError: bool = false, -}; - -inline fn parseErrorSet(T: type, errors: []const std.builtin.Type.Error) ParsedHostFunctionErrorSet { - return comptime brk: { - var errs: ParsedHostFunctionErrorSet = .{}; - for (errors) |err| { - if (!@hasField(ParsedHostFunctionErrorSet, err.name)) { - @compileError("Return value from host function '" ++ @typeInfo(T) ++ "' can not contain error '" ++ err.name ++ "'"); - } - @field(errs, err.name) = true; - } - break :brk errs; - }; -} - -const DeinitFunction = *const fn (ctx: *anyopaque, buffer: [*]u8, len: usize) callconv(.C) void; - -const private = struct { - pub extern fn Bun__CreateFFIFunctionWithDataValue( - *JSGlobalObject, - ?*const ZigString, - argCount: u32, - function: JSHostFunctionPtr, - strong: bool, - data: *anyopaque, - ) JSValue; - pub extern fn Bun__CreateFFIFunction( - globalObject: *JSGlobalObject, - symbolName: ?*const ZigString, - argCount: u32, - functionPointer: JSHostFunctionPtr, - strong: bool, - ) *anyopaque; - - pub extern fn Bun__CreateFFIFunctionValue( - globalObject: *JSGlobalObject, - symbolName: ?*const ZigString, - argCount: u32, - functionPointer: JSHostFunctionPtr, - strong: bool, - add_ptr_field: bool, - inputFunctionPtr: ?*anyopaque, - ) JSValue; - - pub extern fn Bun__untrackFFIFunction( - globalObject: *JSGlobalObject, - function: JSValue, - ) bool; - - pub extern fn Bun__FFIFunction_getDataPtr(JSValue) ?*anyopaque; - pub extern fn Bun__FFIFunction_setDataPtr(JSValue, ?*anyopaque) void; -}; - -pub fn 
NewFunction( - globalObject: *JSGlobalObject, - symbolName: ?*const ZigString, - argCount: u32, - comptime functionPointer: anytype, - strong: bool, -) JSValue { - if (@TypeOf(functionPointer) == JSC.JSHostFunctionType) { - return NewRuntimeFunction(globalObject, symbolName, argCount, functionPointer, strong, false, null); - } - return NewRuntimeFunction(globalObject, symbolName, argCount, toJSHostFunction(functionPointer), strong, false, null); -} - -pub fn createCallback( - globalObject: *JSGlobalObject, - symbolName: ?*const ZigString, - argCount: u32, - comptime functionPointer: anytype, -) JSValue { - if (@TypeOf(functionPointer) == JSC.JSHostFunctionType) { - return NewRuntimeFunction(globalObject, symbolName, argCount, functionPointer, false, false, null); - } - return NewRuntimeFunction(globalObject, symbolName, argCount, toJSHostFunction(functionPointer), false, false, null); -} - -pub fn NewRuntimeFunction( - globalObject: *JSGlobalObject, - symbolName: ?*const ZigString, - argCount: u32, - functionPointer: JSHostFunctionPtr, - strong: bool, - add_ptr_property: bool, - inputFunctionPtr: ?*anyopaque, -) JSValue { - JSC.markBinding(@src()); - return private.Bun__CreateFFIFunctionValue(globalObject, symbolName, argCount, functionPointer, strong, add_ptr_property, inputFunctionPtr); -} - -pub fn getFunctionData(function: JSValue) ?*anyopaque { - JSC.markBinding(@src()); - return private.Bun__FFIFunction_getDataPtr(function); -} - -pub fn setFunctionData(function: JSValue, value: ?*anyopaque) void { - JSC.markBinding(@src()); - return private.Bun__FFIFunction_setDataPtr(function, value); -} - -pub fn NewFunctionWithData( - globalObject: *JSGlobalObject, - symbolName: ?*const ZigString, - argCount: u32, - comptime functionPointer: JSC.JSHostZigFunction, - strong: bool, - data: *anyopaque, -) JSValue { - JSC.markBinding(@src()); - return private.Bun__CreateFFIFunctionWithDataValue( - globalObject, - symbolName, - argCount, - toJSHostFunction(functionPointer), - strong, - data, - ); -} - -pub fn untrackFunction( - globalObject: *JSGlobalObject, - value: JSValue, -) bool { - JSC.markBinding(@src()); - return private.Bun__untrackFFIFunction(globalObject, value); -} - -// DOMCall Fields -const Bun = JSC.API.Bun; -pub const __DOMCall_ptr = Bun.FFIObject.dom_call; -pub const __DOMCall__reader_u8 = Bun.FFIObject.Reader.DOMCalls.u8; -pub const __DOMCall__reader_u16 = Bun.FFIObject.Reader.DOMCalls.u16; -pub const __DOMCall__reader_u32 = Bun.FFIObject.Reader.DOMCalls.u32; -pub const __DOMCall__reader_ptr = Bun.FFIObject.Reader.DOMCalls.ptr; -pub const __DOMCall__reader_i8 = Bun.FFIObject.Reader.DOMCalls.i8; -pub const __DOMCall__reader_i16 = Bun.FFIObject.Reader.DOMCalls.i16; -pub const __DOMCall__reader_i32 = Bun.FFIObject.Reader.DOMCalls.i32; -pub const __DOMCall__reader_f32 = Bun.FFIObject.Reader.DOMCalls.f32; -pub const __DOMCall__reader_f64 = Bun.FFIObject.Reader.DOMCalls.f64; -pub const __DOMCall__reader_i64 = Bun.FFIObject.Reader.DOMCalls.i64; -pub const __DOMCall__reader_u64 = Bun.FFIObject.Reader.DOMCalls.u64; -pub const __DOMCall__reader_intptr = Bun.FFIObject.Reader.DOMCalls.intptr; -pub const DOMCalls = &.{ - .{ .ptr = Bun.FFIObject.dom_call }, - Bun.FFIObject.Reader.DOMCalls, -}; - -extern "c" fn JSCInitialize(env: [*]const [*:0]u8, count: usize, cb: *const fn ([*]const u8, len: usize) callconv(.C) void, eval_mode: bool) void; -pub fn initialize(eval_mode: bool) void { - JSC.markBinding(@src()); - bun.analytics.Features.jsc += 1; - JSCInitialize( - std.os.environ.ptr, - 
std.os.environ.len, - onJSCInvalidEnvVar, - eval_mode, - ); -} - -pub fn onJSCInvalidEnvVar(name: [*]const u8, len: usize) callconv(.C) void { - Output.prettyErrorln( - \\error: invalid JSC environment variable - \\ - \\ {s} - \\ - \\For a list of options, see this file: - \\ - \\ https://github.com/oven-sh/webkit/blob/main/Source/JavaScriptCore/runtime/OptionsList.h - \\ - \\Environment variables must be prefixed with "BUN_JSC_". This code runs before .env files are loaded, so those won't work here. - \\ - \\Warning: options change between releases of Bun and WebKit without notice. This is not a stable API, you should not rely on it beyond debugging something, and it may be removed entirely in a future version of Bun. - , - .{name[0..len]}, - ); - bun.Global.exit(1); -} - -/// Returns null on error. Use windows API to lookup the actual error. -/// The reason this function is in zig is so that we can use our own utf16-conversion functions. -/// -/// Using characters16() does not seem to always have the sentinel. or something else -/// broke when I just used it. Not sure. ... but this works! -fn @"windows process.dlopen"(str: *bun.String) callconv(.C) ?*anyopaque { - if (comptime !bun.Environment.isWindows) { - @compileError(unreachable); - } - - var buf: bun.WPathBuffer = undefined; - const data = switch (str.encoding()) { - .utf8 => bun.strings.convertUTF8toUTF16InBuffer(&buf, str.utf8()), - .utf16 => brk: { - @memcpy(buf[0..str.length()], str.utf16()); - break :brk buf[0..str.length()]; - }, - .latin1 => brk: { - bun.strings.copyU8IntoU16(&buf, str.latin1()); - break :brk buf[0..str.length()]; - }, - }; - buf[data.len] = 0; - const LOAD_WITH_ALTERED_SEARCH_PATH = 0x00000008; - return bun.windows.kernel32.LoadLibraryExW(buf[0..data.len :0].ptr, null, LOAD_WITH_ALTERED_SEARCH_PATH); -} - -comptime { - // this file is gennerated, but cant be placed in the build/debug/codegen folder - // because zig will complain about outside-of-module stuff - _ = @import("./GeneratedJS2Native.zig"); - - if (bun.Environment.isWindows) { - @export(&@"windows process.dlopen", .{ .name = "Bun__LoadLibraryBunString" }); - } -} diff --git a/src/bun.js/bindings/exports.zig b/src/bun.js/bindings/exports.zig deleted file mode 100644 index 549e81cead..0000000000 --- a/src/bun.js/bindings/exports.zig +++ /dev/null @@ -1,78 +0,0 @@ -const JSC = bun.JSC; - -const bun = @import("bun"); -const std = @import("std"); -const strings = bun.strings; -const default_allocator = bun.default_allocator; -const JSGlobalObject = JSC.JSGlobalObject; -const ZigString = JSC.ZigString; -const string = bun.string; -const JSValue = JSC.JSValue; -const String = bun.String; -const JestPrettyFormat = @import("../test/pretty_format.zig").JestPrettyFormat; - -// Re-export all the split-out types -pub const Exception = @import("Exception.zig").Exception; -pub const ErrorCode = @import("ErrorCode.zig").ErrorCode; -pub const ZigErrorType = @import("ZigErrorType.zig").ZigErrorType; -pub const NodePath = JSC.Node.Path; - -// Re-export all the sink types -pub const JSArrayBufferSink = JSC.WebCore.ArrayBufferSink.JSSink; -pub const JSHTTPSResponseSink = JSC.WebCore.HTTPSResponseSink.JSSink; -pub const JSHTTPResponseSink = JSC.WebCore.HTTPResponseSink.JSSink; -pub const JSFileSink = JSC.WebCore.FileSink.JSSink; -pub const JSNetworkSink = JSC.WebCore.NetworkSink.JSSink; - -// Re-export WebSocket client types -pub const WebSocketHTTPClient = @import("../../http/websocket_http_client.zig").WebSocketHTTPClient; -pub const WebSocketHTTPSClient = 
@import("../../http/websocket_http_client.zig").WebSocketHTTPSClient; -pub const WebSocketClient = @import("../../http/websocket_http_client.zig").WebSocketClient; -pub const WebSocketClientTLS = @import("../../http/websocket_http_client.zig").WebSocketClientTLS; - -comptime { - WebSocketClient.exportAll(); - WebSocketClientTLS.exportAll(); - WebSocketHTTPClient.exportAll(); - WebSocketHTTPSClient.exportAll(); -} - -// Re-export the Errorable type and common instances -pub const Errorable = @import("Errorable.zig").Errorable; -pub const ResolvedSource = @import("ResolvedSource.zig").ResolvedSource; -pub const SourceProvider = @import("SourceProvider.zig").SourceProvider; - -// Re-export error and event types -pub const JSErrorCode = @import("JSErrorCode.zig").JSErrorCode; -pub const EventType = @import("EventType.zig").EventType; -pub const JSRuntimeType = @import("JSRuntimeType.zig").JSRuntimeType; -pub const ZigStackFrameCode = @import("ZigStackFrameCode.zig").ZigStackFrameCode; - -// Re-export Process -pub const Process = @import("Process.zig").Process; -comptime { - _ = Process.getTitle; - _ = Process.setTitle; -} - -// Re-export stack trace related types -pub const ZigStackTrace = @import("ZigStackTrace.zig").ZigStackTrace; -pub const ZigStackFrame = @import("ZigStackFrame.zig").ZigStackFrame; -pub const ZigStackFramePosition = @import("ZigStackFramePosition.zig").ZigStackFramePosition; -pub const ZigException = @import("ZigException.zig").ZigException; - -pub const ErrorableResolvedSource = Errorable(ResolvedSource); -pub const ErrorableZigString = Errorable(ZigString); -pub const ErrorableJSValue = Errorable(JSValue); -pub const ErrorableString = Errorable(String); -pub const ConsoleObject = @import("../ConsoleObject.zig"); - -// Re-export type aliases -pub const BunTimer = JSC.API.Bun.Timer; -pub const Formatter = ConsoleObject.Formatter; -pub const HTTPServerRequestContext = JSC.API.HTTPServer.RequestContext; -pub const HTTPSSLServerRequestContext = JSC.API.HTTPSServer.RequestContext; -pub const HTTPDebugServerRequestContext = JSC.API.DebugHTTPServer.RequestContext; -pub const HTTPDebugSSLServerRequestContext = JSC.API.DebugHTTPSServer.RequestContext; -pub const BodyValueBuffererContext = JSC.WebCore.BodyValueBufferer; -pub const TestScope = @import("../test/jest.zig").TestScope; diff --git a/src/bun.js/bindings/generated_classes_list.zig b/src/bun.js/bindings/generated_classes_list.zig index 1d0cc94c9c..44de989749 100644 --- a/src/bun.js/bindings/generated_classes_list.zig +++ b/src/bun.js/bindings/generated_classes_list.zig @@ -1,86 +1,89 @@ const bun = @import("bun"); -const JSC = bun.JSC; +const jsc = bun.JSC; +const api = bun.api; +const node = api.node; +const webcore = bun.webcore; pub const Classes = struct { - pub const Blob = JSC.WebCore.Blob; - pub const HTMLRewriter = JSC.Cloudflare.HTMLRewriter; - pub const Element = JSC.Cloudflare.Element; - pub const Comment = JSC.Cloudflare.Comment; - pub const TextChunk = JSC.Cloudflare.TextChunk; - pub const DocType = JSC.Cloudflare.DocType; - pub const DocEnd = JSC.Cloudflare.DocEnd; - pub const EndTag = JSC.Cloudflare.EndTag; - pub const AttributeIterator = JSC.Cloudflare.AttributeIterator; - pub const CryptoHasher = JSC.API.Bun.Crypto.CryptoHasher; - pub const Expect = JSC.Expect.Expect; - pub const ExpectAny = JSC.Expect.ExpectAny; - pub const ExpectAnything = JSC.Expect.ExpectAnything; - pub const ExpectCustomAsymmetricMatcher = JSC.Expect.ExpectCustomAsymmetricMatcher; - pub const ExpectMatcherContext = 
JSC.Expect.ExpectMatcherContext;
-    pub const ExpectMatcherUtils = JSC.Expect.ExpectMatcherUtils;
-    pub const ExpectStatic = JSC.Expect.ExpectStatic;
-    pub const ExpectCloseTo = JSC.Expect.ExpectCloseTo;
-    pub const ExpectObjectContaining = JSC.Expect.ExpectObjectContaining;
-    pub const ExpectStringContaining = JSC.Expect.ExpectStringContaining;
-    pub const ExpectStringMatching = JSC.Expect.ExpectStringMatching;
-    pub const ExpectArrayContaining = JSC.Expect.ExpectArrayContaining;
-    pub const FileSystemRouter = JSC.API.FileSystemRouter;
-    pub const Glob = JSC.API.Glob;
-    pub const ShellInterpreter = JSC.API.Shell.Interpreter;
-    pub const ParsedShellScript = JSC.API.Shell.ParsedShellScript;
-    pub const Bundler = JSC.API.JSBundler;
+    pub const Blob = webcore.Blob;
+    pub const HTMLRewriter = api.HTMLRewriter.HTMLRewriter;
+    pub const Element = api.HTMLRewriter.Element;
+    pub const Comment = api.HTMLRewriter.Comment;
+    pub const TextChunk = api.HTMLRewriter.TextChunk;
+    pub const DocType = api.HTMLRewriter.DocType;
+    pub const DocEnd = api.HTMLRewriter.DocEnd;
+    pub const EndTag = api.HTMLRewriter.EndTag;
+    pub const AttributeIterator = api.HTMLRewriter.AttributeIterator;
+    pub const CryptoHasher = api.Bun.Crypto.CryptoHasher;
+    pub const Expect = jsc.Expect.Expect;
+    pub const ExpectAny = jsc.Expect.ExpectAny;
+    pub const ExpectAnything = jsc.Expect.ExpectAnything;
+    pub const ExpectCustomAsymmetricMatcher = jsc.Expect.ExpectCustomAsymmetricMatcher;
+    pub const ExpectMatcherContext = jsc.Expect.ExpectMatcherContext;
+    pub const ExpectMatcherUtils = jsc.Expect.ExpectMatcherUtils;
+    pub const ExpectStatic = jsc.Expect.ExpectStatic;
+    pub const ExpectCloseTo = jsc.Expect.ExpectCloseTo;
+    pub const ExpectObjectContaining = jsc.Expect.ExpectObjectContaining;
+    pub const ExpectStringContaining = jsc.Expect.ExpectStringContaining;
+    pub const ExpectStringMatching = jsc.Expect.ExpectStringMatching;
+    pub const ExpectArrayContaining = jsc.Expect.ExpectArrayContaining;
+    pub const FileSystemRouter = api.FileSystemRouter;
+    pub const Glob = api.Glob;
+    pub const ShellInterpreter = api.Shell.Interpreter;
+    pub const ParsedShellScript = api.Shell.ParsedShellScript;
+    pub const Bundler = api.JSBundler;
     pub const JSBundler = Bundler;
-    pub const Transpiler = JSC.API.JSTranspiler;
+    pub const Transpiler = api.JSTranspiler;
     pub const JSTranspiler = Transpiler;
-    pub const Listener = JSC.API.Listener;
-    pub const MatchedRoute = JSC.API.MatchedRoute;
-    pub const NodeJSFS = JSC.Node.NodeJSFS;
-    pub const Request = JSC.WebCore.Request;
-    pub const Response = JSC.WebCore.Response;
-    pub const MD4 = JSC.API.Bun.Crypto.MD4;
-    pub const MD5 = JSC.API.Bun.Crypto.MD5;
-    pub const SHA1 = JSC.API.Bun.Crypto.SHA1;
-    pub const SHA224 = JSC.API.Bun.Crypto.SHA224;
-    pub const SHA256 = JSC.API.Bun.Crypto.SHA256;
-    pub const SHA384 = JSC.API.Bun.Crypto.SHA384;
-    pub const SHA512 = JSC.API.Bun.Crypto.SHA512;
-    pub const SHA512_256 = JSC.API.Bun.Crypto.SHA512_256;
-    pub const ServerWebSocket = JSC.API.ServerWebSocket;
-    pub const Subprocess = JSC.API.Bun.Subprocess;
-    pub const ResourceUsage = JSC.API.Bun.Subprocess.ResourceUsage;
-    pub const TCPSocket = JSC.API.TCPSocket;
-    pub const TLSSocket = JSC.API.TLSSocket;
-    pub const UDPSocket = JSC.API.UDPSocket;
-    pub const SocketAddress = JSC.API.SocketAddress;
-    pub const TextDecoder = JSC.WebCore.TextDecoder;
-    pub const Timeout = JSC.API.Bun.Timer.TimeoutObject;
-    pub const Immediate = JSC.API.Bun.Timer.ImmediateObject;
-    pub const BuildArtifact = JSC.API.BuildArtifact;
-    pub const BuildMessage = JSC.BuildMessage;
-    pub const ResolveMessage = JSC.ResolveMessage;
-    pub const FSWatcher = JSC.Node.FSWatcher;
-    pub const StatWatcher = JSC.Node.StatWatcher;
-    pub const HTTPServer = JSC.API.HTTPServer;
-    pub const HTTPSServer = JSC.API.HTTPSServer;
-    pub const DebugHTTPServer = JSC.API.DebugHTTPServer;
-    pub const DebugHTTPSServer = JSC.API.DebugHTTPSServer;
-    pub const Crypto = JSC.WebCore.Crypto;
-    pub const FFI = JSC.FFI;
-    pub const H2FrameParser = JSC.API.H2FrameParser;
-    pub const FileInternalReadableStreamSource = JSC.WebCore.FileReader.Source;
-    pub const BlobInternalReadableStreamSource = JSC.WebCore.ByteBlobLoader.Source;
-    pub const BytesInternalReadableStreamSource = JSC.WebCore.ByteStream.Source;
-    pub const PostgresSQLConnection = JSC.Postgres.PostgresSQLConnection;
-    pub const PostgresSQLQuery = JSC.Postgres.PostgresSQLQuery;
-    pub const TextEncoderStreamEncoder = JSC.WebCore.TextEncoderStreamEncoder;
-    pub const NativeZlib = JSC.API.NativeZlib;
-    pub const NativeBrotli = JSC.API.NativeBrotli;
-    pub const NodeHTTPResponse = JSC.API.NodeHTTPResponse;
+    pub const Listener = api.Listener;
+    pub const MatchedRoute = api.MatchedRoute;
+    pub const NodeJSFS = node.fs.Binding;
+    pub const Request = webcore.Request;
+    pub const Response = webcore.Response;
+    pub const MD4 = api.Bun.Crypto.MD4;
+    pub const MD5 = api.Bun.Crypto.MD5;
+    pub const SHA1 = api.Bun.Crypto.SHA1;
+    pub const SHA224 = api.Bun.Crypto.SHA224;
+    pub const SHA256 = api.Bun.Crypto.SHA256;
+    pub const SHA384 = api.Bun.Crypto.SHA384;
+    pub const SHA512 = api.Bun.Crypto.SHA512;
+    pub const SHA512_256 = api.Bun.Crypto.SHA512_256;
+    pub const ServerWebSocket = api.ServerWebSocket;
+    pub const Subprocess = api.Subprocess;
+    pub const ResourceUsage = api.Subprocess.ResourceUsage;
+    pub const TCPSocket = api.TCPSocket;
+    pub const TLSSocket = api.TLSSocket;
+    pub const UDPSocket = api.UDPSocket;
+    pub const SocketAddress = api.SocketAddress;
+    pub const TextDecoder = webcore.TextDecoder;
+    pub const Timeout = api.Timer.TimeoutObject;
+    pub const Immediate = api.Timer.ImmediateObject;
+    pub const BuildArtifact = api.BuildArtifact;
+    pub const BuildMessage = api.BuildMessage;
+    pub const ResolveMessage = api.ResolveMessage;
+    pub const FSWatcher = node.fs.Watcher;
+    pub const StatWatcher = api.node.fs.StatWatcher;
+    pub const HTTPServer = api.HTTPServer;
+    pub const HTTPSServer = api.HTTPSServer;
+    pub const DebugHTTPServer = api.DebugHTTPServer;
+    pub const DebugHTTPSServer = api.DebugHTTPSServer;
+    pub const Crypto = webcore.Crypto;
+    pub const FFI = api.FFI;
+    pub const H2FrameParser = api.H2FrameParser;
+    pub const FileInternalReadableStreamSource = webcore.FileReader.Source;
+    pub const BlobInternalReadableStreamSource = webcore.ByteBlobLoader.Source;
+    pub const BytesInternalReadableStreamSource = webcore.ByteStream.Source;
+    pub const PostgresSQLConnection = api.Postgres.PostgresSQLConnection;
+    pub const PostgresSQLQuery = api.Postgres.PostgresSQLQuery;
+    pub const TextEncoderStreamEncoder = webcore.TextEncoderStreamEncoder;
+    pub const NativeZlib = api.NativeZlib;
+    pub const NativeBrotli = api.NativeBrotli;
+    pub const NodeHTTPResponse = api.NodeHTTPResponse;
     pub const FrameworkFileSystemRouter = bun.bake.FrameworkRouter.JSFrameworkRouter;
-    pub const DNSResolver = JSC.DNS.DNSResolver;
+    pub const DNSResolver = api.DNS.DNSResolver;

-    pub const S3Client = JSC.WebCore.S3Client;
-    pub const S3Stat = JSC.WebCore.S3Stat;
-    pub const HTMLBundle = JSC.API.HTMLBundle;
-    pub const RedisClient = JSC.API.Valkey;
+    pub const S3Client = webcore.S3Client;
+    pub const S3Stat = webcore.S3Stat;
+    pub const HTMLBundle = api.HTMLBundle;
+    pub const RedisClient = api.Valkey;
 };
diff --git a/src/bun.js/bindings/sizes.zig b/src/bun.js/bindings/sizes.zig
index 39170e8980..de8a8848db 100644
--- a/src/bun.js/bindings/sizes.zig
+++ b/src/bun.js/bindings/sizes.zig
@@ -6,8 +6,3 @@
 //! memory layout is not guaranteed by the compiler.
 pub const Bun_FFI_PointerOffsetToArgumentsList = 6;
 pub const Bun_FFI_PointerOffsetToTypedArrayVector = 16;
-pub const Bun_CallFrame__callee = 3;
-pub const Bun_CallFrame__argumentCountIncludingThis = 4;
-pub const Bun_CallFrame__thisArgument = 5;
-pub const Bun_CallFrame__firstArgument = 6;
-pub const Bun_CallFrame__align = 8;
diff --git a/src/bun.js/config.zig b/src/bun.js/config.zig
index 19436f69f8..40a9e10268 100644
--- a/src/bun.js/config.zig
+++ b/src/bun.js/config.zig
@@ -7,7 +7,7 @@ const strings = bun.strings;
 const MutableString = bun.MutableString;
 const stringZ = bun.stringZ;
 const default_allocator = bun.default_allocator;
-const C = bun.C;
+
 const std = @import("std");
 const Fs = @import("../fs.zig");
diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig
index 44c57d992b..b8c87c27b7 100644
--- a/src/bun.js/event_loop.zig
+++ b/src/bun.js/event_loop.zig
@@ -10,16 +10,16 @@ const Bun = JSC.API.Bun;
 const TaggedPointerUnion = @import("../ptr.zig").TaggedPointerUnion;
 const typeBaseName = @import("../meta.zig").typeBaseName;
 const AsyncGlobWalkTask = JSC.API.Glob.WalkTask.AsyncGlobWalkTask;
-const CopyFilePromiseTask = bun.JSC.WebCore.Blob.Store.CopyFilePromiseTask;
+const CopyFilePromiseTask = bun.webcore.Blob.copy_file.CopyFilePromiseTask;
 const AsyncTransformTask = JSC.API.JSTranspiler.TransformTask.AsyncTransformTask;
-const ReadFileTask = bun.JSC.WebCore.Blob.ReadFileTask;
-const WriteFileTask = bun.JSC.WebCore.Blob.WriteFileTask;
-const napi_async_work = JSC.napi.napi_async_work;
+const ReadFileTask = bun.webcore.Blob.read_file.ReadFileTask;
+const WriteFileTask = bun.webcore.Blob.write_file.WriteFileTask;
+const napi_async_work = bun.api.napi.napi_async_work;
 const FetchTasklet = Fetch.FetchTasklet;
 const S3 = bun.S3;
 const S3HttpSimpleTask = S3.S3HttpSimpleTask;
 const S3HttpDownloadStreamingTask = S3.S3HttpDownloadStreamingTask;
-const NapiFinalizerTask = bun.JSC.napi.NapiFinalizerTask;
+const NapiFinalizerTask = bun.api.napi.NapiFinalizerTask;

 const Waker = bun.Async.Waker;

@@ -100,7 +100,7 @@ pub fn WorkTask(comptime Context: type) type {
         allocator: std.mem.Allocator,
         globalThis: *JSC.JSGlobalObject,
         concurrent_task: ConcurrentTask = .{},
-        async_task_tracker: JSC.AsyncTaskTracker,
+        async_task_tracker: JSC.Debugger.AsyncTaskTracker,

         // This is a poll because we want it to enter the uSockets loop
         ref: Async.KeepAlive = .{},
@@ -112,7 +112,7 @@ pub fn WorkTask(comptime Context: type) type {
             .ctx = value,
             .allocator = allocator,
             .globalThis = globalThis,
-            .async_task_tracker = JSC.AsyncTaskTracker.init(vm),
+            .async_task_tracker = JSC.Debugger.AsyncTaskTracker.init(vm),
         });

         this.ref.ref(this.event_loop.virtual_machine);
@@ -378,56 +378,57 @@ pub const JSCScheduler = struct {
     }
 };

-const ThreadSafeFunction = JSC.napi.ThreadSafeFunction;
-const HotReloadTask = JSC.HotReloader.HotReloadTask;
-const FSWatchTask = JSC.Node.FSWatcher.FSWatchTask;
+const ThreadSafeFunction = bun.api.napi.ThreadSafeFunction;
+const HotReloadTask = JSC.hot_reloader.HotReloader.Task;
+const FSWatchTask = bun.api.node.fs.Watcher.FSWatchTask;
 const PollPendingModulesTask = JSC.ModuleLoader.AsyncModule.Queue;
 // const PromiseTask = JSInternalPromise.Completion.PromiseTask;
-const GetAddrInfoRequestTask = JSC.DNS.GetAddrInfoRequest.Task;
+const GetAddrInfoRequestTask = bun.api.DNS.GetAddrInfoRequest.Task;
 const JSCDeferredWorkTask = JSCScheduler.JSCDeferredWorkTask;

-const Stat = JSC.Node.Async.stat;
-const Lstat = JSC.Node.Async.lstat;
-const Fstat = JSC.Node.Async.fstat;
-const Open = JSC.Node.Async.open;
-const ReadFile = JSC.Node.Async.readFile;
-const WriteFile = JSC.Node.Async.writeFile;
-const CopyFile = JSC.Node.Async.copyFile;
-const Read = JSC.Node.Async.read;
-const Write = JSC.Node.Async.write;
-const Truncate = JSC.Node.Async.truncate;
-const FTruncate = JSC.Node.Async.ftruncate;
-const Readdir = JSC.Node.Async.readdir;
-const ReaddirRecursive = JSC.Node.Async.readdir_recursive;
-const Readv = JSC.Node.Async.readv;
-const Writev = JSC.Node.Async.writev;
-const Close = JSC.Node.Async.close;
-const Rm = JSC.Node.Async.rm;
-const Rmdir = JSC.Node.Async.rmdir;
-const Chown = JSC.Node.Async.chown;
-const FChown = JSC.Node.Async.fchown;
-const Utimes = JSC.Node.Async.utimes;
-const Lutimes = JSC.Node.Async.lutimes;
-const Chmod = JSC.Node.Async.chmod;
-const Fchmod = JSC.Node.Async.fchmod;
-const Link = JSC.Node.Async.link;
-const Symlink = JSC.Node.Async.symlink;
-const Readlink = JSC.Node.Async.readlink;
-const Realpath = JSC.Node.Async.realpath;
-const RealpathNonNative = JSC.Node.Async.realpathNonNative;
-const Mkdir = JSC.Node.Async.mkdir;
-const Fsync = JSC.Node.Async.fsync;
-const Rename = JSC.Node.Async.rename;
-const Fdatasync = JSC.Node.Async.fdatasync;
-const Access = JSC.Node.Async.access;
-const AppendFile = JSC.Node.Async.appendFile;
-const Mkdtemp = JSC.Node.Async.mkdtemp;
-const Exists = JSC.Node.Async.exists;
-const Futimes = JSC.Node.Async.futimes;
-const Lchmod = JSC.Node.Async.lchmod;
-const Lchown = JSC.Node.Async.lchown;
-const StatFS = JSC.Node.Async.statfs;
-const Unlink = JSC.Node.Async.unlink;
+const AsyncFS = bun.api.node.fs.Async;
+const Stat = AsyncFS.stat;
+const Lstat = AsyncFS.lstat;
+const Fstat = AsyncFS.fstat;
+const Open = AsyncFS.open;
+const ReadFile = AsyncFS.readFile;
+const WriteFile = AsyncFS.writeFile;
+const CopyFile = AsyncFS.copyFile;
+const Read = AsyncFS.read;
+const Write = AsyncFS.write;
+const Truncate = AsyncFS.truncate;
+const FTruncate = AsyncFS.ftruncate;
+const Readdir = AsyncFS.readdir;
+const ReaddirRecursive = AsyncFS.readdir_recursive;
+const Readv = AsyncFS.readv;
+const Writev = AsyncFS.writev;
+const Close = AsyncFS.close;
+const Rm = AsyncFS.rm;
+const Rmdir = AsyncFS.rmdir;
+const Chown = AsyncFS.chown;
+const FChown = AsyncFS.fchown;
+const Utimes = AsyncFS.utimes;
+const Lutimes = AsyncFS.lutimes;
+const Chmod = AsyncFS.chmod;
+const Fchmod = AsyncFS.fchmod;
+const Link = AsyncFS.link;
+const Symlink = AsyncFS.symlink;
+const Readlink = AsyncFS.readlink;
+const Realpath = AsyncFS.realpath;
+const RealpathNonNative = AsyncFS.realpathNonNative;
+const Mkdir = AsyncFS.mkdir;
+const Fsync = AsyncFS.fsync;
+const Rename = AsyncFS.rename;
+const Fdatasync = AsyncFS.fdatasync;
+const Access = AsyncFS.access;
+const AppendFile = AsyncFS.appendFile;
+const Mkdtemp = AsyncFS.mkdtemp;
+const Exists = AsyncFS.exists;
+const Futimes = AsyncFS.futimes;
+const Lchmod = AsyncFS.lchmod;
+const Lchown = AsyncFS.lchown;
+const StatFS = AsyncFS.statfs;
+const Unlink = AsyncFS.unlink;

 const NativeZlib = JSC.API.NativeZlib;
 const NativeBrotli = JSC.API.NativeBrotli;
@@ -445,14 +446,14 @@ const ShellAsync = bun.shell.Interpreter.Async;
 // const ShellIOReaderAsyncDeinit = bun.shell.Interpreter.IOReader.AsyncDeinit;
 const ShellIOReaderAsyncDeinit = bun.shell.Interpreter.AsyncDeinitReader;
 const ShellIOWriterAsyncDeinit = bun.shell.Interpreter.AsyncDeinitWriter;
-const TimeoutObject = JSC.BunTimer.TimeoutObject;
-const ImmediateObject = JSC.BunTimer.ImmediateObject;
+const TimeoutObject = Timer.TimeoutObject;
+const ImmediateObject = Timer.ImmediateObject;
 const ProcessWaiterThreadTask = if (Environment.isPosix) bun.spawn.process.WaiterThread.ProcessQueue.ResultTask else opaque {};
 const ProcessMiniEventLoopWaiterThreadTask = if (Environment.isPosix) bun.spawn.WaiterThread.ProcessMiniEventLoopQueue.ResultTask else opaque {};
 const ShellAsyncSubprocessDone = bun.shell.Interpreter.Cmd.ShellAsyncSubprocessDone;
-const RuntimeTranspilerStore = JSC.RuntimeTranspilerStore;
+const RuntimeTranspilerStore = JSC.ModuleLoader.RuntimeTranspilerStore;
 const ServerAllConnectionsClosedTask = @import("./api/server.zig").ServerAllConnectionsClosedTask;
-const FlushPendingFileSinkTask = JSC.WebCore.FlushPendingFileSinkTask;
+const FlushPendingFileSinkTask = bun.webcore.FileSink.FlushPendingTask;

 // Task.get(ReadFileTask) -> ?ReadFileTask
 pub const Task = TaggedPointerUnion(.{
@@ -824,8 +825,8 @@ pub const EventLoop = struct {
     /// - immediate_tasks: tasks that will run on the current tick
     ///
     /// Having two queues avoids infinite loops creating by calling `setImmediate` in a `setImmediate` callback.
-    immediate_tasks: std.ArrayListUnmanaged(*JSC.BunTimer.ImmediateObject) = .{},
-    next_immediate_tasks: std.ArrayListUnmanaged(*JSC.BunTimer.ImmediateObject) = .{},
+    immediate_tasks: std.ArrayListUnmanaged(*Timer.ImmediateObject) = .{},
+    next_immediate_tasks: std.ArrayListUnmanaged(*Timer.ImmediateObject) = .{},

     concurrent_tasks: ConcurrentTask.Queue = ConcurrentTask.Queue{},
     global: *JSC.JSGlobalObject = undefined,
@@ -838,7 +839,7 @@ pub const EventLoop = struct {
     debug: Debug = .{},
     entered_event_loop_count: isize = 0,
     concurrent_ref: std.atomic.Value(i32) = std.atomic.Value(i32).init(0),
-    imminent_gc_timer: std.atomic.Value(?*JSC.BunTimer.WTFTimer) = .{ .raw = null },
+    imminent_gc_timer: std.atomic.Value(?*Timer.WTFTimer) = .{ .raw = null },

     signal_handler: if (Environment.isPosix) ?*PosixSignalHandle else void = if (Environment.isPosix) null,

@@ -1113,8 +1114,8 @@ pub const EventLoop = struct {
                 transform_task.*.runFromJS();
                 transform_task.deinit();
             },
-            @field(Task.Tag, @typeName(JSC.napi.napi_async_work)) => {
-                const transform_task: *JSC.napi.napi_async_work = task.get(JSC.napi.napi_async_work).?;
+            @field(Task.Tag, @typeName(bun.api.napi.napi_async_work)) => {
+                const transform_task: *bun.api.napi.napi_async_work = task.get(bun.api.napi.napi_async_work).?;
                 transform_task.*.runFromJS();
             },
             @field(Task.Tag, @typeName(ThreadSafeFunction)) => {
@@ -1723,7 +1724,7 @@ pub const EventLoop = struct {
         this.tasks.writeItem(task) catch unreachable;
     }

-    pub fn enqueueImmediateTask(this: *EventLoop, task: *JSC.BunTimer.ImmediateObject) void {
+    pub fn enqueueImmediateTask(this: *EventLoop, task: *Timer.ImmediateObject) void {
         this.immediate_tasks.append(bun.default_allocator, task) catch bun.outOfMemory();
     }

@@ -1917,12 +1918,6 @@ pub fn AbstractVM(inner: anytype) switch (@TypeOf(inner)) {
     @compileError("Invalid event loop ctx: " ++ @typeName(@TypeOf(inner)));
 }

-// pub const EventLoopRefImpl = struct {
-//     fn enqueueTask(ref: anytype) {
-//         const event_loop_ctx =
-//     }
-// };
-
 pub const
MiniEventLoop = struct { tasks: Queue, concurrent_tasks: ConcurrentTaskQueue = .{}, @@ -1934,8 +1929,8 @@ pub const MiniEventLoop = struct { after_event_loop_callback_ctx: ?*anyopaque = null, after_event_loop_callback: ?JSC.OpaqueCallback = null, pipe_read_buffer: ?*PipeReadBuffer = null, - stdout_store: ?*bun.JSC.WebCore.Blob.Store = null, - stderr_store: ?*bun.JSC.WebCore.Blob.Store = null, + stdout_store: ?*bun.webcore.Blob.Store = null, + stderr_store: ?*bun.webcore.Blob.Store = null, const PipeReadBuffer = [256 * 1024]u8; pub threadlocal var globalInitialized: bool = false; @@ -2140,7 +2135,7 @@ pub const MiniEventLoop = struct { .ref_count = std.atomic.Value(u32).init(2), .allocator = bun.default_allocator, .data = .{ - .file = JSC.WebCore.Blob.FileStore{ + .file = .{ .pathlike = .{ .fd = fd, }, @@ -2171,7 +2166,7 @@ pub const MiniEventLoop = struct { .ref_count = std.atomic.Value(u32).init(2), .allocator = bun.default_allocator, .data = .{ - .file = JSC.WebCore.Blob.FileStore{ + .file = .{ .pathlike = .{ .fd = fd, }, @@ -2570,3 +2565,5 @@ pub const PosixSignalTask = struct { Bun__onSignalForJS(number, globalObject); } }; + +const Timer = bun.api.Timer; diff --git a/src/bun.js/hot_reloader.zig b/src/bun.js/hot_reloader.zig new file mode 100644 index 0000000000..26bcf6c30f --- /dev/null +++ b/src/bun.js/hot_reloader.zig @@ -0,0 +1,491 @@ +pub const ImportWatcher = union(enum) { + none: void, + hot: *Watcher, + watch: *Watcher, + + pub fn start(this: ImportWatcher) !void { + switch (this) { + inline .hot => |w| try w.start(), + inline .watch => |w| try w.start(), + else => {}, + } + } + + pub inline fn watchlist(this: ImportWatcher) Watcher.WatchList { + return switch (this) { + inline .hot, .watch => |w| w.watchlist, + else => .{}, + }; + } + + pub inline fn indexOf(this: ImportWatcher, hash: Watcher.HashType) ?u32 { + return switch (this) { + inline .hot, .watch => |w| w.indexOf(hash), + else => null, + }; + } + + pub inline fn addFile( + this: ImportWatcher, + fd: bun.FD, + file_path: string, + hash: Watcher.HashType, + loader: options.Loader, + dir_fd: bun.FD, + package_json: ?*bun.PackageJSON, + comptime copy_file_path: bool, + ) bun.JSC.Maybe(void) { + return switch (this) { + inline .hot, .watch => |watcher| watcher.addFile( + fd, + file_path, + hash, + loader, + dir_fd, + package_json, + copy_file_path, + ), + .none => .{ .result = {} }, + }; + } +}; + +pub const HotReloader = NewHotReloader(VirtualMachine, JSC.EventLoop, false); +pub const WatchReloader = NewHotReloader(VirtualMachine, JSC.EventLoop, true); + +extern fn BunDebugger__willHotReload() void; + +pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime reload_immediately: bool) type { + return struct { + const Reloader = @This(); + + ctx: *Ctx, + verbose: bool = false, + pending_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), + + tombstones: bun.StringHashMapUnmanaged(*bun.fs.FileSystem.RealFS.EntriesOption) = .{}, + + pub fn init(ctx: *Ctx, fs: *bun.fs.FileSystem, verbose: bool, clear_screen_flag: bool) *Watcher { + const reloader = bun.default_allocator.create(Reloader) catch bun.outOfMemory(); + reloader.* = .{ + .ctx = ctx, + .verbose = Environment.enable_logs or verbose, + }; + + clear_screen = clear_screen_flag; + const watcher = Watcher.init(Reloader, reloader, fs, bun.default_allocator) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); + }; + watcher.start() catch |err| { + 
bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.panic("Failed to start File Watcher: {s}", .{@errorName(err)}); + }; + return watcher; + } + + fn debug(comptime fmt: string, args: anytype) void { + if (Environment.enable_logs) { + Output.scoped(.hot_reloader, false)(fmt, args); + } else { + Output.prettyErrorln("watcher: " ++ fmt, args); + } + } + + pub fn eventLoop(this: @This()) *EventLoopType { + return this.ctx.eventLoop(); + } + + pub fn enqueueTaskConcurrent(this: @This(), task: *JSC.ConcurrentTask) void { + if (comptime reload_immediately) + unreachable; + + this.eventLoop().enqueueTaskConcurrent(task); + } + + pub var clear_screen = false; + + pub const Task = struct { + count: u8 = 0, + hashes: [8]u32, + paths: if (Ctx == bun.bake.DevServer) [8][]const u8 else void, + /// Left uninitialized until .enqueue + concurrent_task: JSC.ConcurrentTask, + reloader: *Reloader, + + pub fn initEmpty(reloader: *Reloader) Task { + return .{ + .reloader = reloader, + + .hashes = [_]u32{0} ** 8, + .paths = if (Ctx == bun.bake.DevServer) [_][]const u8{&.{}} ** 8, + .count = 0, + .concurrent_task = undefined, + }; + } + + pub fn append(this: *Task, id: u32) void { + if (this.count == 8) { + this.enqueue(); + this.count = 0; + } + + this.hashes[this.count] = id; + this.count += 1; + } + + pub fn run(this: *Task) void { + // Since we rely on the event loop for hot reloads, there can be + // a delay before the next reload begins. In the time between the + // last reload and the next one, we shouldn't schedule any more + // hot reloads. Since we reload literally everything, we don't + // need to worry about missing any changes. + // + // Note that we set the count _before_ we reload, so that if we + // get another hot reload request while we're reloading, we'll + // still enqueue it. 
+ while (this.reloader.pending_count.swap(0, .monotonic) > 0) { + this.reloader.ctx.reload(this); + } + } + + pub fn enqueue(this: *Task) void { + JSC.markBinding(@src()); + if (this.count == 0) + return; + + if (comptime reload_immediately) { + Output.flush(); + if (comptime Ctx == ImportWatcher) { + if (this.reloader.ctx.rare_data) |rare| + rare.closeAllListenSocketsForWatchMode(); + } + bun.reloadProcess(bun.default_allocator, clear_screen, false); + unreachable; + } + + _ = this.reloader.pending_count.fetchAdd(1, .monotonic); + + BunDebugger__willHotReload(); + const that = bun.new(Task, .{ + .reloader = this.reloader, + .count = this.count, + .paths = this.paths, + .hashes = this.hashes, + .concurrent_task = undefined, + }); + that.concurrent_task = .{ .task = JSC.Task.init(that), .auto_delete = false }; + that.reloader.enqueueTaskConcurrent(&that.concurrent_task); + this.count = 0; + } + + pub fn deinit(this: *Task) void { + bun.destroy(this); + } + }; + + pub fn enableHotModuleReloading(this: *Ctx) void { + if (comptime @TypeOf(this.bun_watcher) == ImportWatcher) { + if (this.bun_watcher != .none) + return; + } else { + if (this.bun_watcher != null) + return; + } + + var reloader = bun.default_allocator.create(Reloader) catch bun.outOfMemory(); + reloader.* = .{ + .ctx = this, + .verbose = Environment.enable_logs or if (@hasField(Ctx, "log")) this.log.level.atLeast(.info) else false, + }; + + if (comptime @TypeOf(this.bun_watcher) == ImportWatcher) { + this.bun_watcher = if (reload_immediately) + .{ .watch = Watcher.init( + Reloader, + reloader, + this.transpiler.fs, + bun.default_allocator, + ) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); + } } + else + .{ .hot = Watcher.init( + Reloader, + reloader, + this.transpiler.fs, + bun.default_allocator, + ) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); + } }; + + if (reload_immediately) { + this.transpiler.resolver.watcher = bun.resolver.ResolveWatcher(*Watcher, Watcher.onMaybeWatchDirectory).init(this.bun_watcher.watch); + } else { + this.transpiler.resolver.watcher = bun.resolver.ResolveWatcher(*Watcher, Watcher.onMaybeWatchDirectory).init(this.bun_watcher.hot); + } + } else { + this.bun_watcher = Watcher.init( + Reloader, + reloader, + this.transpiler.fs, + bun.default_allocator, + ) catch |err| { + bun.handleErrorReturnTrace(err, @errorReturnTrace()); + Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); + }; + this.transpiler.resolver.watcher = bun.resolver.ResolveWatcher(*Watcher, Watcher.onMaybeWatchDirectory).init(this.bun_watcher.?); + } + + clear_screen = !this.transpiler.env.hasSetNoClearTerminalOnReload(!Output.enable_ansi_colors); + + reloader.getContext().start() catch @panic("Failed to start File Watcher"); + } + + fn putTombstone(this: *@This(), key: []const u8, value: *bun.fs.FileSystem.RealFS.EntriesOption) void { + this.tombstones.put(bun.default_allocator, key, value) catch unreachable; + } + + fn getTombstone(this: *@This(), key: []const u8) ?*bun.fs.FileSystem.RealFS.EntriesOption { + return this.tombstones.get(key); + } + + pub fn onError( + _: *@This(), + err: bun.sys.Error, + ) void { + Output.err(@as(bun.sys.E, @enumFromInt(err.errno)), "Watcher crashed", .{}); + if (bun.Environment.isDebug) { + @panic("Watcher crash"); + } + } + + pub fn getContext(this: *@This()) *Watcher { + if (comptime 
@TypeOf(this.ctx.bun_watcher) == ImportWatcher) { + if (reload_immediately) { + return this.ctx.bun_watcher.watch; + } else { + return this.ctx.bun_watcher.hot; + } + } else if (@typeInfo(@TypeOf(this.ctx.bun_watcher)) == .optional) { + return this.ctx.bun_watcher.?; + } else { + return this.ctx.bun_watcher; + } + } + + pub noinline fn onFileUpdate( + this: *@This(), + events: []Watcher.WatchEvent, + changed_files: []?[:0]u8, + watchlist: Watcher.WatchList, + ) void { + const slice = watchlist.slice(); + const file_paths = slice.items(.file_path); + const counts = slice.items(.count); + const kinds = slice.items(.kind); + const hashes = slice.items(.hash); + const parents = slice.items(.parent_hash); + const file_descriptors = slice.items(.fd); + const ctx = this.getContext(); + defer ctx.flushEvictions(); + defer Output.flush(); + + const fs: *Fs.FileSystem = &Fs.FileSystem.instance; + const rfs: *Fs.FileSystem.RealFS = &fs.fs; + var _on_file_update_path_buf: bun.PathBuffer = undefined; + var current_task = Task.initEmpty(this); + defer current_task.enqueue(); + + for (events) |event| { + const file_path = file_paths[event.index]; + const update_count = counts[event.index] + 1; + counts[event.index] = update_count; + const kind = kinds[event.index]; + + // so it's consistent with the rest + // if we use .extname we might run into an issue with whether or not the "." is included. + // const path = Fs.PathName.init(file_path); + const current_hash = hashes[event.index]; + + switch (kind) { + .file => { + if (event.op.delete or event.op.rename) { + ctx.removeAtIndex( + event.index, + 0, + &.{}, + .file, + ); + } + + if (this.verbose) + debug("File changed: {s}", .{fs.relativeTo(file_path)}); + + if (event.op.write or event.op.delete or event.op.rename) { + current_task.append(current_hash); + } + + // TODO: delete events? + }, + .directory => { + if (comptime Environment.isWindows) { + // on windows we receive file events for all items affected by a directory change + // so we only need to clear the directory cache. all other effects will be handled + // by the file events + _ = this.ctx.bustDirCache(strings.withoutTrailingSlashWindowsPath(file_path)); + continue; + } + var affected_buf: [128][]const u8 = undefined; + var entries_option: ?*Fs.FileSystem.RealFS.EntriesOption = null; + + const affected = brk: { + if (comptime Environment.isMac) { + if (rfs.entries.get(file_path)) |existing| { + this.putTombstone(file_path, existing); + entries_option = existing; + } else if (this.getTombstone(file_path)) |existing| { + entries_option = existing; + } + + var affected_i: usize = 0; + + // if a file descriptor is stale, we need to close it + if (event.op.delete and entries_option != null) { + for (parents, 0..) 
|parent_hash, entry_id| { + if (parent_hash == current_hash) { + const affected_path = file_paths[entry_id]; + const was_deleted = check: { + std.posix.access(affected_path, std.posix.F_OK) catch break :check true; + break :check false; + }; + if (!was_deleted) continue; + + affected_buf[affected_i] = affected_path[file_path.len..]; + affected_i += 1; + if (affected_i >= affected_buf.len) break; + } + } + } + + break :brk affected_buf[0..affected_i]; + } + + break :brk event.names(changed_files); + }; + + if (affected.len > 0 and !Environment.isMac) { + if (rfs.entries.get(file_path)) |existing| { + this.putTombstone(file_path, existing); + entries_option = existing; + } else if (this.getTombstone(file_path)) |existing| { + entries_option = existing; + } + } + + _ = this.ctx.bustDirCache(strings.withoutTrailingSlashWindowsPath(file_path)); + + if (entries_option) |dir_ent| { + var last_file_hash: Watcher.HashType = std.math.maxInt(Watcher.HashType); + + for (affected) |changed_name_| { + const changed_name: []const u8 = if (comptime Environment.isMac) + changed_name_ + else + bun.asByteSlice(changed_name_.?); + if (changed_name.len == 0 or changed_name[0] == '~' or changed_name[0] == '.') continue; + + const loader = (this.ctx.getLoaders().get(Fs.PathName.init(changed_name).ext) orelse .file); + var prev_entry_id: usize = std.math.maxInt(usize); + if (loader != .file) { + var path_string: bun.PathString = undefined; + var file_hash: Watcher.HashType = last_file_hash; + const abs_path: string = brk: { + if (dir_ent.entries.get(@as([]const u8, @ptrCast(changed_name)))) |file_ent| { + // reset the file descriptor + file_ent.entry.cache.fd = .invalid; + file_ent.entry.need_stat = true; + path_string = file_ent.entry.abs_path; + file_hash = Watcher.getHash(path_string.slice()); + for (hashes, 0..) |hash, entry_id| { + if (hash == file_hash) { + if (file_descriptors[entry_id].isValid()) { + if (prev_entry_id != entry_id) { + current_task.append(hashes[entry_id]); + ctx.removeAtIndex( + @as(u16, @truncate(entry_id)), + 0, + &.{}, + .file, + ); + } + } + + prev_entry_id = entry_id; + break; + } + } + + break :brk path_string.slice(); + } else { + const file_path_without_trailing_slash = std.mem.trimRight(u8, file_path, std.fs.path.sep_str); + @memcpy(_on_file_update_path_buf[0..file_path_without_trailing_slash.len], file_path_without_trailing_slash); + _on_file_update_path_buf[file_path_without_trailing_slash.len] = std.fs.path.sep; + + @memcpy(_on_file_update_path_buf[file_path_without_trailing_slash.len..][0..changed_name.len], changed_name); + const path_slice = _on_file_update_path_buf[0 .. 
file_path_without_trailing_slash.len + changed_name.len + 1]; + file_hash = Watcher.getHash(path_slice); + break :brk path_slice; + } + }; + + // skip consecutive duplicates + if (last_file_hash == file_hash) continue; + last_file_hash = file_hash; + + if (this.verbose) + debug("File change: {s}", .{fs.relativeTo(abs_path)}); + } + } + } + + if (this.verbose) { + debug("Dir change: {s}", .{fs.relativeTo(file_path)}); + } + }, + } + } + } + }; +} + +const std = @import("std"); +const bun = @import("bun"); +const string = []const u8; +const Output = bun.Output; +const Global = bun.Global; +const Environment = bun.Environment; +const strings = bun.strings; +const options = bun.options; +const JSC = bun.JSC; +const MarkedArrayBuffer = bun.jsc.MarkedArrayBuffer; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const VirtualMachine = JSC.VirtualMachine; +const Watcher = bun.Watcher; +const ModuleLoader = JSC.ModuleLoader; +const FetchFlags = ModuleLoader.FetchFlags; +const TaggedPointerUnion = @import("../ptr.zig").TaggedPointerUnion; +pub const Buffer = MarkedArrayBuffer; +const Lock = bun.Mutex; +const Async = bun.Async; +const Ordinal = bun.Ordinal; +const SourceMap = @import("../sourcemap/sourcemap.zig"); +const ParsedSourceMap = SourceMap.ParsedSourceMap; +const MappingList = SourceMap.Mapping.List; +const SourceProviderMap = SourceMap.SourceProviderMap; +const uv = bun.windows.libuv; +const uws = bun.uws; +const Fs = bun.fs; diff --git a/src/bun.js/ipc.zig b/src/bun.js/ipc.zig index f6cf1c2632..5d2e5d5fd5 100644 --- a/src/bun.js/ipc.zig +++ b/src/bun.js/ipc.zig @@ -634,7 +634,7 @@ const NamedPipeIPCData = struct { const stream = this.writer.getStream() orelse { this.close(false); - return JSC.Maybe(void).errno(bun.C.E.PIPE, .pipe); + return JSC.Maybe(void).errno(bun.sys.E.PIPE, .pipe); }; const readStartResult = stream.readStart(instance, NewNamedPipeIPCHandler(Context).onReadAlloc, NewNamedPipeIPCHandler(Context).onReadError, NewNamedPipeIPCHandler(Context).onRead); @@ -898,7 +898,7 @@ fn NewNamedPipeIPCHandler(comptime Context: type) type { return available.ptr[0..suggested_size]; } - fn onReadError(this: *Context, err: bun.C.E) void { + fn onReadError(this: *Context, err: bun.sys.E) void { log("NewNamedPipeIPCHandler#onReadError {}", .{err}); if (this.ipc()) |ipc_data| { ipc_data.close(true); diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig deleted file mode 100644 index b4908c2cfa..0000000000 --- a/src/bun.js/javascript.zig +++ /dev/null @@ -1,5029 +0,0 @@ -const std = @import("std"); -const StaticExport = @import("./bindings/static_export.zig"); -const bun = @import("bun"); -const string = bun.string; -const Output = bun.Output; -const Global = bun.Global; -const Environment = bun.Environment; -const strings = bun.strings; -const MutableString = bun.MutableString; -const stringZ = bun.stringZ; -const default_allocator = bun.default_allocator; -const StoredFileDescriptorType = bun.StoredFileDescriptorType; -const ErrorableString = bun.JSC.ErrorableString; -const Arena = @import("../allocators/mimalloc_arena.zig").Arena; - -const Exception = bun.JSC.Exception; -const Allocator = std.mem.Allocator; -const IdentityContext = @import("../identity_context.zig").IdentityContext; -const Fs = @import("../fs.zig"); -const Resolver = @import("../resolver/resolver.zig"); -const ast = @import("../import_record.zig"); -const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint; -const ParseResult = bun.transpiler.ParseResult; -const logger = 
bun.logger; -const Api = @import("../api/schema.zig").Api; -const options = @import("../options.zig"); -const Transpiler = bun.Transpiler; -const PluginRunner = bun.transpiler.PluginRunner; -const ServerEntryPoint = bun.transpiler.EntryPoints.ServerEntryPoint; -const js_printer = bun.js_printer; -const js_parser = bun.js_parser; -const js_ast = bun.JSAst; -const NodeFallbackModules = @import("../node_fallbacks.zig"); -const ImportKind = ast.ImportKind; -const Analytics = @import("../analytics/analytics_thread.zig"); -const ZigString = bun.JSC.ZigString; -const Runtime = @import("../runtime.zig"); -const Router = @import("./api/filesystem_router.zig"); -const ImportRecord = ast.ImportRecord; -const DotEnv = @import("../env_loader.zig"); -const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; -const MacroRemap = @import("../resolver/package_json.zig").MacroMap; -const String = bun.String; -const JSC = bun.JSC; -const JSError = @import("./base.zig").JSError; -const d = @import("./base.zig").d; -const MarkedArrayBuffer = @import("./base.zig").MarkedArrayBuffer; -const getAllocator = @import("./base.zig").getAllocator; -const JSValue = bun.JSC.JSValue; -const NewClass = @import("./base.zig").NewClass; - -const JSGlobalObject = bun.JSC.JSGlobalObject; -const JSPrivateDataPtr = bun.JSC.JSPrivateDataPtr; -const ConsoleObject = bun.JSC.ConsoleObject; -const Node = bun.JSC.Node; -const ZigException = bun.JSC.ZigException; -const ZigStackTrace = bun.JSC.ZigStackTrace; -const ErrorableResolvedSource = bun.JSC.ErrorableResolvedSource; -const ResolvedSource = bun.JSC.ResolvedSource; -const JSInternalPromise = bun.JSC.JSInternalPromise; -const JSModuleLoader = bun.JSC.JSModuleLoader; -const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation; -const ErrorableZigString = bun.JSC.ErrorableZigString; -const VM = JSC.VM; -const JSFunction = bun.JSC.JSFunction; -const Config = @import("./config.zig"); -const URL = @import("../url.zig").URL; -const Bun = JSC.API.Bun; -const EventLoop = bun.JSC.EventLoop; -const PendingResolution = @import("../resolver/resolver.zig").PendingResolution; -const ThreadSafeFunction = JSC.napi.ThreadSafeFunction; -const PackageManager = @import("../install/install.zig").PackageManager; -const IPC = @import("ipc.zig"); -const DNSResolver = @import("api/bun/dns_resolver.zig").DNSResolver; -const Watcher = bun.Watcher; -const node_module_module = @import("./bindings/NodeModuleModule.zig"); - -const ModuleLoader = JSC.ModuleLoader; -const FetchFlags = JSC.FetchFlags; - -const TaggedPointerUnion = @import("../ptr.zig").TaggedPointerUnion; -const Task = JSC.Task; - -pub const Buffer = MarkedArrayBuffer; -const Lock = bun.Mutex; -const Async = bun.Async; - -const Ordinal = bun.Ordinal; - -pub const OpaqueCallback = *const fn (current: ?*anyopaque) callconv(.C) void; -pub fn OpaqueWrap(comptime Context: type, comptime Function: fn (this: *Context) void) OpaqueCallback { - return struct { - pub fn callback(ctx: ?*anyopaque) callconv(.C) void { - const context: *Context = @as(*Context, @ptrCast(@alignCast(ctx.?))); - Function(context); - } - }.callback; -} - -pub const bun_file_import_path = "/node_modules.server.bun"; - -export var has_bun_garbage_collector_flag_enabled = false; - -const SourceMap = @import("../sourcemap/sourcemap.zig"); -const ParsedSourceMap = SourceMap.ParsedSourceMap; -const MappingList = SourceMap.Mapping.List; -const SourceProviderMap = SourceMap.SourceProviderMap; - -const uv = bun.windows.libuv; - -pub const SavedSourceMap = struct { - 
/// This is a pointer to the map located on the VirtualMachine struct - map: *HashTable, - mutex: bun.Mutex = .{}, - - pub const vlq_offset = 24; - - pub fn init(this: *SavedSourceMap, map: *HashTable) void { - this.* = .{ - .map = map, - .mutex = .{}, - }; - - this.map.lockPointers(); - } - - pub inline fn lock(map: *SavedSourceMap) void { - map.mutex.lock(); - map.map.unlockPointers(); - } - - pub inline fn unlock(map: *SavedSourceMap) void { - map.map.lockPointers(); - map.mutex.unlock(); - } - - // For the runtime, we store the number of mappings and how many bytes the final list is at the beginning of the array - // The first 8 bytes are the length of the array - // The second 8 bytes are the number of mappings - pub const SavedMappings = struct { - data: [*]u8, - - pub fn vlq(this: SavedMappings) []u8 { - return this.data[vlq_offset..this.len()]; - } - - pub inline fn len(this: SavedMappings) usize { - return @as(u64, @bitCast(this.data[0..8].*)); - } - - pub fn deinit(this: SavedMappings) void { - default_allocator.free(this.data[0..this.len()]); - } - - pub fn toMapping(this: SavedMappings, allocator: Allocator, path: string) anyerror!ParsedSourceMap { - const result = SourceMap.Mapping.parse( - allocator, - this.data[vlq_offset..this.len()], - @as(usize, @bitCast(this.data[8..16].*)), - 1, - @as(usize, @bitCast(this.data[16..24].*)), - ); - switch (result) { - .fail => |fail| { - if (Output.enable_ansi_colors_stderr) { - try fail.toData(path).writeFormat( - Output.errorWriter(), - logger.Kind.warn, - false, - true, - ); - } else { - try fail.toData(path).writeFormat( - Output.errorWriter(), - logger.Kind.warn, - false, - false, - ); - } - - return fail.err; - }, - .success => |success| { - return success; - }, - } - } - }; - - /// ParsedSourceMap is the canonical form for sourcemaps, - /// - /// but `SavedMappings` and `SourceProviderMap` are much cheaper to construct. 
- /// In `fn get`, this value gets converted to ParsedSourceMap always - pub const Value = TaggedPointerUnion(.{ - ParsedSourceMap, - SavedMappings, - SourceProviderMap, - }); - - pub const MissingSourceMapNoteInfo = struct { - pub var storage: bun.PathBuffer = undefined; - pub var path: ?[]const u8 = null; - pub var seen_invalid = false; - - pub fn print() void { - if (seen_invalid) return; - if (path) |note| { - Output.note("missing sourcemaps for {s}", .{note}); - Output.note("consider bundling with '--sourcemap' to get unminified traces", .{}); - } - } - }; - - pub fn putZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *anyopaque, path: []const u8) void { - const source_provider: *SourceProviderMap = @ptrCast(opaque_source_provider); - this.putValue(path, Value.init(source_provider)) catch bun.outOfMemory(); - } - - pub fn removeZigSourceProvider(this: *SavedSourceMap, opaque_source_provider: *anyopaque, path: []const u8) void { - this.lock(); - defer this.unlock(); - - const entry = this.map.getEntry(bun.hash(path)) orelse return; - const old_value = Value.from(entry.value_ptr.*); - if (old_value.get(SourceProviderMap)) |prov| { - if (@intFromPtr(prov) == @intFromPtr(opaque_source_provider)) { - // there is nothing to unref or deinit - this.map.removeByPtr(entry.key_ptr); - } - } else if (old_value.get(ParsedSourceMap)) |map| { - if (map.underlying_provider.provider()) |prov| { - if (@intFromPtr(prov) == @intFromPtr(opaque_source_provider)) { - this.map.removeByPtr(entry.key_ptr); - map.deref(); - } - } - } - } - - pub const HashTable = std.HashMap(u64, *anyopaque, IdentityContext(u64), 80); - - pub fn onSourceMapChunk(this: *SavedSourceMap, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { - try this.putMappings(source, chunk.buffer); - } - - pub const SourceMapHandler = js_printer.SourceMapHandler.For(SavedSourceMap, onSourceMapChunk); - - pub fn deinit(this: *SavedSourceMap) void { - { - this.lock(); - defer this.unlock(); - - var iter = this.map.valueIterator(); - while (iter.next()) |val| { - var value = Value.from(val.*); - if (value.get(ParsedSourceMap)) |source_map| { - source_map.deref(); - } else if (value.get(SavedMappings)) |saved_mappings| { - var saved = SavedMappings{ .data = @as([*]u8, @ptrCast(saved_mappings)) }; - saved.deinit(); - } else if (value.get(SourceProviderMap)) |provider| { - _ = provider; // do nothing, we did not hold a ref to ZigSourceProvider - } - } - } - - this.map.unlockPointers(); - this.map.deinit(); - } - - pub fn putMappings(this: *SavedSourceMap, source: logger.Source, mappings: MutableString) !void { - try this.putValue(source.path.text, Value.init(bun.cast(*SavedMappings, mappings.list.items.ptr))); - } - - fn putValue(this: *SavedSourceMap, path: []const u8, value: Value) !void { - this.lock(); - defer this.unlock(); - - const entry = try this.map.getOrPut(bun.hash(path)); - if (entry.found_existing) { - var old_value = Value.from(entry.value_ptr.*); - if (old_value.get(ParsedSourceMap)) |parsed_source_map| { - var source_map: *ParsedSourceMap = parsed_source_map; - source_map.deref(); - } else if (old_value.get(SavedMappings)) |saved_mappings| { - var saved = SavedMappings{ .data = @as([*]u8, @ptrCast(saved_mappings)) }; - saved.deinit(); - } else if (old_value.get(SourceProviderMap)) |provider| { - _ = provider; // do nothing, we did not hold a ref to ZigSourceProvider - } - } - entry.value_ptr.* = value.ptr(); - } - - /// You must call `sourcemap.map.deref()` or you will leak memory - fn getWithContent( - 
this: *SavedSourceMap, - path: string, - hint: SourceMap.ParseUrlResultHint, - ) SourceMap.ParseUrl { - const hash = bun.hash(path); - - // This lock is for the hash table - this.lock(); - - // This mapping entry is only valid while the mutex is locked - const mapping = this.map.getEntry(hash) orelse { - this.unlock(); - return .{}; - }; - - switch (Value.from(mapping.value_ptr.*).tag()) { - @field(Value.Tag, @typeName(ParsedSourceMap)) => { - defer this.unlock(); - const map = Value.from(mapping.value_ptr.*).as(ParsedSourceMap); - map.ref(); - return .{ .map = map }; - }, - @field(Value.Tag, @typeName(SavedMappings)) => { - defer this.unlock(); - var saved = SavedMappings{ .data = @as([*]u8, @ptrCast(Value.from(mapping.value_ptr.*).as(ParsedSourceMap))) }; - defer saved.deinit(); - const result = bun.new(ParsedSourceMap, saved.toMapping(default_allocator, path) catch { - _ = this.map.remove(mapping.key_ptr.*); - return .{}; - }); - mapping.value_ptr.* = Value.init(result).ptr(); - result.ref(); - - return .{ .map = result }; - }, - @field(Value.Tag, @typeName(SourceProviderMap)) => { - const ptr: *SourceProviderMap = Value.from(mapping.value_ptr.*).as(SourceProviderMap); - this.unlock(); - - // Do not lock the mutex while we're parsing JSON! - if (ptr.getSourceMap(path, .none, hint)) |parse| { - if (parse.map) |map| { - map.ref(); - // The mutex is not locked. We have to check the hash table again. - this.putValue(path, Value.init(map)) catch bun.outOfMemory(); - - return parse; - } - } - - this.lock(); - defer this.unlock(); - // does not have a valid source map. let's not try again - _ = this.map.remove(hash); - - // Store path for a user note. - const storage = MissingSourceMapNoteInfo.storage[0..path.len]; - @memcpy(storage, path); - MissingSourceMapNoteInfo.path = storage; - return .{}; - }, - else => { - if (Environment.allow_assert) { - @panic("Corrupt pointer tag"); - } - this.unlock(); - return .{}; - }, - } - } - - /// You must `deref()` the returned value or you will leak memory - pub fn get(this: *SavedSourceMap, path: string) ?*ParsedSourceMap { - return this.getWithContent(path, .mappings_only).map; - } - - pub fn resolveMapping( - this: *SavedSourceMap, - path: []const u8, - line: i32, - column: i32, - source_handling: SourceMap.SourceContentHandling, - ) ?SourceMap.Mapping.Lookup { - const parse = this.getWithContent(path, switch (source_handling) { - .no_source_contents => .mappings_only, - .source_contents => .{ .all = .{ .line = line, .column = column } }, - }); - const map = parse.map orelse return null; - - const mapping = parse.mapping orelse - SourceMap.Mapping.find(map.mappings, line, column) orelse - return null; - - return .{ - .mapping = mapping, - .source_map = map, - .prefetched_source_code = parse.source_contents, - }; - } -}; -const uws = bun.uws; - -pub export fn Bun__getVM() *JSC.VirtualMachine { - return JSC.VirtualMachine.get(); -} - -pub export fn Bun__drainMicrotasks() void { - JSC.VirtualMachine.get().eventLoop().tick(); -} - -export fn Bun__readOriginTimer(vm: *JSC.VirtualMachine) u64 { - return vm.origin_timer.read(); -} - -export fn Bun__readOriginTimerStart(vm: *JSC.VirtualMachine) f64 { - // timespce to milliseconds - return @as(f64, @floatCast((@as(f64, @floatFromInt(vm.origin_timestamp)) + JSC.VirtualMachine.origin_relative_epoch) / 1_000_000.0)); -} - -pub export fn Bun__GlobalObject__hasIPC(global: *JSGlobalObject) bool { - return global.bunVM().ipc != null; -} - -pub extern fn Bun__Process__queueNextTick1(*JSGlobalObject, func: JSValue, 
JSValue) void; -pub extern fn Bun__Process__queueNextTick2(*JSGlobalObject, func: JSValue, JSValue, JSValue) void; - -comptime { - const Bun__Process__send = JSC.toJSHostFunction(Bun__Process__send_); - @export(&Bun__Process__send, .{ .name = "Bun__Process__send" }); -} -pub fn Bun__Process__send_(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue { - JSC.markBinding(@src()); - var message, var handle, var options_, var callback = callFrame.argumentsAsArray(4); - - if (handle.isFunction()) { - callback = handle; - handle = .undefined; - options_ = .undefined; - } else if (options_.isFunction()) { - callback = options_; - options_ = .undefined; - } else if (!options_.isUndefined()) { - try globalObject.validateObject("options", options_, .{}); - } - - const S = struct { - fn impl(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments_ = callframe.arguments_old(1).slice(); - const ex = arguments_[0]; - VirtualMachine.Process__emitErrorEvent(globalThis, ex); - return .undefined; - } - }; - - const vm = globalObject.bunVM(); - const ipc_instance = vm.getIPCInstance() orelse { - const ex = globalObject.ERR(.IPC_CHANNEL_CLOSED, "Channel closed.", .{}).toJS(); - if (callback.isFunction()) { - Bun__Process__queueNextTick1(globalObject, callback, ex); - } else { - const fnvalue = JSFunction.create(globalObject, "", S.impl, 1, .{}); - Bun__Process__queueNextTick1(globalObject, fnvalue, ex); - } - return .false; - }; - - if (message.isUndefined()) { - return globalObject.throwMissingArgumentsValue(&.{"message"}); - } - if (!message.isString() and !message.isObject() and !message.isNumber() and !message.isBoolean() and !message.isNull()) { - return globalObject.throwInvalidArgumentTypeValue("message", "string, object, number, or boolean", message); - } - - const good = ipc_instance.data.serializeAndSend(globalObject, message); - - if (good) { - if (callback.isFunction()) { - Bun__Process__queueNextTick1(globalObject, callback, .null); - } - } else { - const ex = globalObject.createTypeErrorInstance("process.send() failed", .{}); - ex.put(globalObject, ZigString.static("syscall"), bun.String.static("write").toJS(globalObject)); - if (callback.isFunction()) { - Bun__Process__queueNextTick1(globalObject, callback, ex); - } else { - const fnvalue = JSFunction.create(globalObject, "", S.impl, 1, .{}); - Bun__Process__queueNextTick1(globalObject, fnvalue, ex); - } - } - - return .true; -} - -pub export fn Bun__isBunMain(globalObject: *JSGlobalObject, str: *const bun.String) bool { - return str.eqlUTF8(globalObject.bunVM().main); -} - -/// When IPC environment variables are passed, the socket is not immediately opened, -/// but rather we wait for process.on('message') or process.send() to be called, THEN -/// we open the socket. This is to avoid missing messages at the start of the program. -pub export fn Bun__ensureProcessIPCInitialized(globalObject: *JSGlobalObject) void { - // getIPC() will initialize a "waiting" ipc instance so this is enough. - // it will do nothing if IPC is not enabled. 
- _ = globalObject.bunVM().getIPCInstance(); -} - -/// This function is called on the main thread -/// The bunVM() call will assert this -pub export fn Bun__queueTask(global: *JSGlobalObject, task: *JSC.CppTask) void { - JSC.markBinding(@src()); - - global.bunVM().eventLoop().enqueueTask(Task.init(task)); -} - -pub export fn Bun__queueTaskWithTimeout(global: *JSGlobalObject, task: *JSC.CppTask, milliseconds: i32) void { - JSC.markBinding(@src()); - - global.bunVM().eventLoop().enqueueTaskWithTimeout(Task.init(task), milliseconds); -} - -pub export fn Bun__reportUnhandledError(globalObject: *JSGlobalObject, value: JSValue) callconv(.C) JSValue { - JSC.markBinding(@src()); - // This JSGlobalObject might not be the main script execution context - // See the crash in https://github.com/oven-sh/bun/issues/9778 - const jsc_vm = JSC.VirtualMachine.get(); - _ = jsc_vm.uncaughtException(globalObject, value, false); - return .undefined; -} - -/// This function is called on another thread -/// The main difference: we need to allocate the task & wakeup the thread -/// We can avoid that if we run it from the main thread. -pub export fn Bun__queueTaskConcurrently(global: *JSGlobalObject, task: *JSC.CppTask) void { - JSC.markBinding(@src()); - - global.bunVMConcurrently().eventLoop().enqueueTaskConcurrent( - JSC.ConcurrentTask.create(Task.init(task)), - ); -} - -pub export fn Bun__handleRejectedPromise(global: *JSGlobalObject, promise: *JSC.JSPromise) void { - JSC.markBinding(@src()); - - const result = promise.result(global.vm()); - var jsc_vm = global.bunVM(); - - // this seems to happen in some cases when GC is running - if (result == .zero) - return; - - _ = jsc_vm.unhandledRejection(global, result, promise.asValue(global)); - jsc_vm.autoGarbageCollect(); -} - -pub export fn Bun__onDidAppendPlugin(jsc_vm: *VirtualMachine, globalObject: *JSGlobalObject) void { - if (jsc_vm.plugin_runner != null) { - return; - } - - jsc_vm.plugin_runner = PluginRunner{ - .global_object = globalObject, - .allocator = jsc_vm.allocator, - }; - jsc_vm.transpiler.linker.plugin_runner = &jsc_vm.plugin_runner.?; -} - -const WindowsOnly = struct { - pub fn Bun__ZigGlobalObject__uvLoop(jsc_vm: *VirtualMachine) callconv(.C) *bun.windows.libuv.Loop { - return jsc_vm.uvLoop(); - } -}; - -comptime { - if (Environment.isWindows) { - @export(&WindowsOnly.Bun__ZigGlobalObject__uvLoop, .{ .name = "Bun__ZigGlobalObject__uvLoop" }); - } -} - -pub const ExitHandler = struct { - exit_code: u8 = 0, - - pub export fn Bun__getExitCode(vm: *VirtualMachine) u8 { - return vm.exit_handler.exit_code; - } - - pub export fn Bun__setExitCode(vm: *VirtualMachine, code: u8) void { - vm.exit_handler.exit_code = code; - } - - extern fn Process__dispatchOnBeforeExit(*JSGlobalObject, code: u8) void; - extern fn Process__dispatchOnExit(*JSGlobalObject, code: u8) void; - extern fn Bun__closeAllSQLiteDatabasesForTermination() void; - - pub fn dispatchOnExit(this: *ExitHandler) void { - JSC.markBinding(@src()); - const vm: *VirtualMachine = @alignCast(@fieldParentPtr("exit_handler", this)); - Process__dispatchOnExit(vm.global, this.exit_code); - if (vm.isMainThread()) { - Bun__closeAllSQLiteDatabasesForTermination(); - } - } - - pub fn dispatchOnBeforeExit(this: *ExitHandler) void { - JSC.markBinding(@src()); - const vm: *VirtualMachine = @alignCast(@fieldParentPtr("exit_handler", this)); - Process__dispatchOnBeforeExit(vm.global, this.exit_code); - } -}; - -pub const WebWorker = @import("./web_worker.zig").WebWorker; - -pub const ImportWatcher = union(enum) { 
- none: void, - hot: *Watcher, - watch: *Watcher, - - pub fn start(this: ImportWatcher) !void { - switch (this) { - inline .hot => |w| try w.start(), - inline .watch => |w| try w.start(), - else => {}, - } - } - - pub inline fn watchlist(this: ImportWatcher) Watcher.WatchList { - return switch (this) { - inline .hot, .watch => |w| w.watchlist, - else => .{}, - }; - } - - pub inline fn indexOf(this: ImportWatcher, hash: Watcher.HashType) ?u32 { - return switch (this) { - inline .hot, .watch => |w| w.indexOf(hash), - else => null, - }; - } - - pub inline fn addFile( - this: ImportWatcher, - fd: StoredFileDescriptorType, - file_path: string, - hash: Watcher.HashType, - loader: options.Loader, - dir_fd: StoredFileDescriptorType, - package_json: ?*PackageJSON, - comptime copy_file_path: bool, - ) bun.JSC.Maybe(void) { - return switch (this) { - inline .hot, .watch => |watcher| watcher.addFile( - fd, - file_path, - hash, - loader, - dir_fd, - package_json, - copy_file_path, - ), - .none => .{ .result = {} }, - }; - } -}; - -pub const PlatformEventLoop = if (Environment.isPosix) uws.Loop else bun.Async.Loop; - -export fn Bun__setTLSRejectUnauthorizedValue(value: i32) void { - VirtualMachine.get().default_tls_reject_unauthorized = value != 0; -} - -export fn Bun__getTLSRejectUnauthorizedValue() i32 { - return if (JSC.VirtualMachine.get().getTLSRejectUnauthorized()) 1 else 0; -} - -export fn Bun__setVerboseFetchValue(value: i32) void { - VirtualMachine.get().default_verbose_fetch = if (value == 1) .headers else if (value == 2) .curl else .none; -} - -export fn Bun__getVerboseFetchValue() i32 { - return switch (JSC.VirtualMachine.get().getVerboseFetch()) { - .none => 0, - .headers => 1, - .curl => 2, - }; -} - -const body_value_pool_size = if (bun.heap_breakdown.enabled) 0 else 256; -pub const BodyValueRef = bun.HiveRef(JSC.WebCore.Body.Value, body_value_pool_size); -const BodyValueHiveAllocator = bun.HiveArray(BodyValueRef, body_value_pool_size).Fallback; - -const AutoKiller = struct { - const log = Output.scoped(.AutoKiller, true); - processes: std.AutoArrayHashMapUnmanaged(*bun.spawn.Process, void) = .{}, - enabled: bool = false, - ever_enabled: bool = false, - - pub fn enable(this: *AutoKiller) void { - this.enabled = true; - this.ever_enabled = true; - } - - pub fn disable(this: *AutoKiller) void { - this.enabled = false; - } - - pub const Result = struct { - processes: u32 = 0, - - pub fn format(self: @This(), comptime _: []const u8, _: anytype, writer: anytype) !void { - switch (self.processes) { - 0 => {}, - 1 => { - try writer.writeAll("killed 1 dangling process"); - }, - else => { - try std.fmt.format(writer, "killed {d} dangling processes", .{self.processes}); - }, - } - } - }; - - pub fn kill(this: *AutoKiller) Result { - return .{ - .processes = this.killProcesses(), - }; - } - - fn killProcesses(this: *AutoKiller) u32 { - var count: u32 = 0; - while (this.processes.pop()) |process| { - if (!process.key.hasExited()) { - log("process.kill {d}", .{process.key.pid}); - count += @as(u32, @intFromBool(process.key.kill(@intFromEnum(bun.SignalCode.default)) == .result)); - } - } - return count; - } - - pub fn clear(this: *AutoKiller) void { - if (this.processes.capacity() > 256) { - this.processes.clearAndFree(bun.default_allocator); - } - - this.processes.clearRetainingCapacity(); - } - - pub fn onSubprocessSpawn(this: *AutoKiller, process: *bun.spawn.Process) void { - if (this.enabled) - this.processes.put(bun.default_allocator, process, {}) catch {}; - } - - pub fn onSubprocessExit(this: 
*AutoKiller, process: *bun.spawn.Process) void { - if (this.ever_enabled) - _ = this.processes.swapRemove(process); - } - - pub fn deinit(this: *AutoKiller) void { - this.processes.deinit(bun.default_allocator); - } -}; - -/// TODO: rename this to ScriptExecutionContext -/// This is the shared global state for a single JS instance execution -/// Today, Bun is one VM per thread, so the name "VirtualMachine" sort of makes sense -/// However, that may change in the future -pub const VirtualMachine = struct { - global: *JSGlobalObject, - allocator: std.mem.Allocator, - has_loaded_constructors: bool = false, - transpiler: Transpiler, - bun_watcher: ImportWatcher = .{ .none = {} }, - console: *ConsoleObject, - log: *logger.Log, - main: string = "", - main_is_html_entrypoint: bool = false, - main_resolved_path: bun.String = bun.String.empty, - main_hash: u32 = 0, - entry_point: ServerEntryPoint = undefined, - origin: URL = URL{}, - node_fs: ?*Node.NodeFS = null, - timer: Bun.Timer.All, - event_loop_handle: ?*PlatformEventLoop = null, - pending_unref_counter: i32 = 0, - preload: []const string = &[_][]const u8{}, - unhandled_pending_rejection_to_capture: ?*JSValue = null, - standalone_module_graph: ?*bun.StandaloneModuleGraph = null, - smol: bool = false, - dns_result_order: DNSResolver.Order = .verbatim, - - hot_reload: bun.CLI.Command.HotReload = .none, - jsc: *VM = undefined, - - /// hide bun:wrap from stack traces - /// bun:wrap is very noisy - hide_bun_stackframes: bool = true, - - is_printing_plugin: bool = false, - is_shutting_down: bool = false, - plugin_runner: ?PluginRunner = null, - is_main_thread: bool = false, - last_reported_error_for_dedupe: JSValue = .zero, - exit_handler: ExitHandler = .{}, - - default_tls_reject_unauthorized: ?bool = null, - default_verbose_fetch: ?bun.http.HTTPVerboseLevel = null, - - /// Do not access this field directly! - /// - /// It exists in the VirtualMachine struct so that we don't accidentally - /// make a stack copy of it only use it through source_mappings. - /// - /// This proposal could let us safely move it back https://github.com/ziglang/zig/issues/7769 - saved_source_map_table: SavedSourceMap.HashTable = undefined, - source_mappings: SavedSourceMap = undefined, - - arena: *Arena = undefined, - has_loaded: bool = false, - - transpiled_count: usize = 0, - resolved_count: usize = 0, - had_errors: bool = false, - - macros: MacroMap, - macro_entry_points: std.AutoArrayHashMap(i32, *MacroEntryPoint), - macro_mode: bool = false, - no_macros: bool = false, - auto_killer: AutoKiller = .{ - .enabled = false, - }, - - has_any_macro_remappings: bool = false, - is_from_devserver: bool = false, - has_enabled_macro_mode: bool = false, - - /// Used by bun:test to set global hooks for beforeAll, beforeEach, etc. 
- is_in_preload: bool = false, - has_patched_run_main: bool = false, - - transpiler_store: JSC.RuntimeTranspilerStore, - - after_event_loop_callback_ctx: ?*anyopaque = null, - after_event_loop_callback: ?OpaqueCallback = null, - - remap_stack_frames_mutex: bun.Mutex = .{}, - - /// The arguments used to launch the process _after_ the script name and bun and any flags applied to Bun - /// "bun run foo --bar" - /// ["--bar"] - /// "bun run foo baz --bar" - /// ["baz", "--bar"] - /// "bun run foo - /// [] - /// "bun foo --bar" - /// ["--bar"] - /// "bun foo baz --bar" - /// ["baz", "--bar"] - /// "bun foo - /// [] - argv: []const []const u8 = &[_][]const u8{}, - - origin_timer: std.time.Timer = undefined, - origin_timestamp: u64 = 0, - macro_event_loop: EventLoop = EventLoop{}, - regular_event_loop: EventLoop = EventLoop{}, - event_loop: *EventLoop = undefined, - - ref_strings: JSC.RefString.Map = undefined, - ref_strings_mutex: Lock = undefined, - - active_tasks: usize = 0, - - rare_data: ?*JSC.RareData = null, - is_us_loop_entered: bool = false, - pending_internal_promise: ?*JSInternalPromise = null, - entry_point_result: struct { - value: JSC.Strong = .empty, - cjs_set_value: bool = false, - } = .{}, - - auto_install_dependencies: bool = false, - - onUnhandledRejection: *const OnUnhandledRejection = defaultOnUnhandledRejection, - onUnhandledRejectionCtx: ?*anyopaque = null, - onUnhandledRejectionExceptionList: ?*ExceptionList = null, - unhandled_error_counter: usize = 0, - is_handling_uncaught_exception: bool = false, - exit_on_uncaught_exception: bool = false, - - modules: ModuleLoader.AsyncModule.Queue = .{}, - aggressive_garbage_collection: GCLevel = GCLevel.none, - - module_loader: ModuleLoader = .{}, - - gc_controller: JSC.GarbageCollectionController = .{}, - worker: ?*JSC.WebWorker = null, - ipc: ?IPCInstanceUnion = null, - - debugger: ?Debugger = null, - has_started_debugger: bool = false, - has_terminated: bool = false, - - debug_thread_id: if (Environment.allow_assert) std.Thread.Id else void, - - body_value_hive_allocator: BodyValueHiveAllocator = undefined, - - is_inside_deferred_task_queue: bool = false, - - // defaults off. .on("message") will set it to true unless overridden - // process.channel.unref() will set it to false and mark it overridden - // on disconnect it will be disabled - channel_ref: bun.Async.KeepAlive = .{}, - // if process.channel.ref() or unref() has been called, this is set to true - channel_ref_overridden: bool = false, - // if one disconnect event listener should be ignored - channel_ref_should_ignore_one_disconnect_event_listener: bool = false, - - /// Whether this VM should be destroyed after it exits, even if it is the main thread's VM. - /// Worker VMs are always destroyed on exit, regardless of this setting. Setting this to - /// true may expose bugs that would otherwise only occur using Workers. Controlled by - /// Options.destruct_main_thread_on_exit. - destruct_main_thread_on_exit: bool, - - /// A set of extensions that exist in the require.extensions map. Keys - /// contain the leading '.'. Value is either a loader for built in - /// functions, or an index into JSCommonJSExtensions. - /// - /// `.keys() == transpiler.resolver.opts.extra_cjs_extensions`, so - /// mutations in this map must update the resolver. - commonjs_custom_extensions: bun.StringArrayHashMapUnmanaged(node_module_module.CustomLoader.Packed) = .empty, - /// Incremented when the `require.extensions` for a built-in extension is mutated. 
- /// An example is mutating `require.extensions['.js']` to intercept all '.js' files. - /// The value is decremented when defaults are restored. - has_mutated_built_in_extensions: u32 = 0, - - pub const OnUnhandledRejection = fn (*VirtualMachine, globalObject: *JSGlobalObject, JSValue) void; - - pub const OnException = fn (*ZigException) void; - - pub fn initRequestBodyValue(this: *VirtualMachine, body: JSC.WebCore.Body.Value) !*BodyValueRef { - return BodyValueRef.init(body, &this.body_value_hive_allocator); - } - - pub threadlocal var is_bundler_thread_for_bytecode_cache: bool = false; - - pub fn uwsLoop(this: *const VirtualMachine) *uws.Loop { - if (comptime Environment.isPosix) { - if (Environment.allow_assert) { - return this.event_loop_handle orelse @panic("uws event_loop_handle is null"); - } - return this.event_loop_handle.?; - } - - return uws.Loop.get(); - } - - pub fn uvLoop(this: *const VirtualMachine) *bun.Async.Loop { - if (Environment.allow_assert) { - return this.event_loop_handle orelse @panic("libuv event_loop_handle is null"); - } - return this.event_loop_handle.?; - } - - pub fn isMainThread(this: *const VirtualMachine) bool { - return this.worker == null; - } - - pub fn isInspectorEnabled(this: *const VirtualMachine) bool { - return this.debugger != null; - } - - pub export fn Bun__VirtualMachine__isShuttingDown(this: *const VirtualMachine) callconv(.C) bool { - return this.isShuttingDown(); - } - - pub fn isShuttingDown(this: *const VirtualMachine) bool { - return this.is_shutting_down; - } - - pub fn getTLSRejectUnauthorized(this: *const VirtualMachine) bool { - return this.default_tls_reject_unauthorized orelse this.transpiler.env.getTLSRejectUnauthorized(); - } - - pub fn onSubprocessSpawn(this: *VirtualMachine, process: *bun.spawn.Process) void { - this.auto_killer.onSubprocessSpawn(process); - } - - pub fn onSubprocessExit(this: *VirtualMachine, process: *bun.spawn.Process) void { - this.auto_killer.onSubprocessExit(process); - } - - pub fn getVerboseFetch(this: *VirtualMachine) bun.http.HTTPVerboseLevel { - return this.default_verbose_fetch orelse { - if (this.transpiler.env.get("BUN_CONFIG_VERBOSE_FETCH")) |verbose_fetch| { - if (strings.eqlComptime(verbose_fetch, "true") or strings.eqlComptime(verbose_fetch, "1")) { - this.default_verbose_fetch = .headers; - return .headers; - } else if (strings.eqlComptime(verbose_fetch, "curl")) { - this.default_verbose_fetch = .curl; - return .curl; - } - } - this.default_verbose_fetch = .none; - return .none; - }; - } - - pub const VMHolder = struct { - pub threadlocal var vm: ?*VirtualMachine = null; - pub threadlocal var cached_global_object: ?*JSGlobalObject = null; - pub var main_thread_vm: ?*VirtualMachine = null; - pub export fn Bun__setDefaultGlobalObject(global: *JSGlobalObject) void { - if (vm) |vm_instance| { - vm_instance.global = global; - - // Ensure this is always set when it should be. 
- if (vm_instance.is_main_thread) { - VMHolder.main_thread_vm = vm_instance; - } - } - - cached_global_object = global; - } - - pub export fn Bun__getDefaultGlobalObject() ?*JSGlobalObject { - return cached_global_object orelse { - if (vm) |vm_instance| { - cached_global_object = vm_instance.global; - } - return null; - }; - } - - pub export fn Bun__thisThreadHasVM() bool { - return vm != null; - } - }; - - pub inline fn get() *VirtualMachine { - return VMHolder.vm.?; - } - - pub fn getMainThreadVM() ?*VirtualMachine { - return VMHolder.main_thread_vm; - } - - pub fn mimeType(this: *VirtualMachine, str: []const u8) ?bun.http.MimeType { - return this.rareData().mimeTypeFromString(this.allocator, str); - } - - pub fn onAfterEventLoop(this: *VirtualMachine) void { - if (this.after_event_loop_callback) |cb| { - const ctx = this.after_event_loop_callback_ctx; - this.after_event_loop_callback = null; - this.after_event_loop_callback_ctx = null; - cb(ctx); - } - } - - pub fn isEventLoopAliveExcludingImmediates(vm: *const VirtualMachine) bool { - return vm.unhandled_error_counter == 0 and - (@intFromBool(vm.event_loop_handle.?.isActive()) + - vm.active_tasks + - vm.event_loop.tasks.count + - @intFromBool(vm.event_loop.hasPendingRefs()) > 0); - } - - pub fn isEventLoopAlive(vm: *const VirtualMachine) bool { - return vm.isEventLoopAliveExcludingImmediates() or - // We need to keep running in this case so that immediate tasks get run. But immediates - // intentionally don't make the event loop _active_ so we need to check for them - // separately. - vm.event_loop.immediate_tasks.items.len > 0 or - vm.event_loop.next_immediate_tasks.items.len > 0; - } - - pub fn wakeup(this: *VirtualMachine) void { - this.eventLoop().wakeup(); - } - - const SourceMapHandlerGetter = struct { - vm: *VirtualMachine, - printer: *js_printer.BufferPrinter, - - pub fn get(this: *SourceMapHandlerGetter) js_printer.SourceMapHandler { - if (this.vm.debugger == null or this.vm.debugger.?.mode == .connect) { - return SavedSourceMap.SourceMapHandler.init(&this.vm.source_mappings); - } - - return js_printer.SourceMapHandler.For(SourceMapHandlerGetter, onChunk).init(this); - } - - /// When the inspector is enabled, we want to generate an inline sourcemap. - /// And, for now, we also store it in source_mappings like normal - /// This is hideously expensive memory-wise... - pub fn onChunk(this: *SourceMapHandlerGetter, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { - var temp_json_buffer = bun.MutableString.initEmpty(bun.default_allocator); - defer temp_json_buffer.deinit(); - temp_json_buffer = try chunk.printSourceMapContentsAtOffset(source, temp_json_buffer, true, SavedSourceMap.vlq_offset, true); - const source_map_url_prefix_start = "//# sourceMappingURL=data:application/json;base64,"; - // TODO: do we need to %-encode the path? 
- const source_url_len = source.path.text.len; - const source_mapping_url = "\n//# sourceURL="; - const prefix_len = source_map_url_prefix_start.len + source_mapping_url.len + source_url_len; - - try this.vm.source_mappings.putMappings(source, chunk.buffer); - const encode_len = bun.base64.encodeLen(temp_json_buffer.list.items); - try this.printer.ctx.buffer.growIfNeeded(encode_len + prefix_len + 2); - this.printer.ctx.buffer.appendAssumeCapacity("\n" ++ source_map_url_prefix_start); - _ = bun.base64.encode(this.printer.ctx.buffer.list.items.ptr[this.printer.ctx.buffer.len()..this.printer.ctx.buffer.list.capacity], temp_json_buffer.list.items); - this.printer.ctx.buffer.list.items.len += encode_len; - this.printer.ctx.buffer.appendAssumeCapacity(source_mapping_url); - // TODO: do we need to %-encode the path? - this.printer.ctx.buffer.appendAssumeCapacity(source.path.text); - try this.printer.ctx.buffer.append("\n"); - } - }; - - pub inline fn sourceMapHandler(this: *VirtualMachine, printer: *js_printer.BufferPrinter) SourceMapHandlerGetter { - return SourceMapHandlerGetter{ - .vm = this, - .printer = printer, - }; - } - - pub const GCLevel = enum(u3) { - none = 0, - mild = 1, - aggressive = 2, - }; - - pub threadlocal var is_main_thread_vm: bool = false; - - pub const UnhandledRejectionScope = struct { - ctx: ?*anyopaque = null, - onUnhandledRejection: *const OnUnhandledRejection = undefined, - count: usize = 0, - - pub fn apply(this: *UnhandledRejectionScope, vm: *JSC.VirtualMachine) void { - vm.onUnhandledRejection = this.onUnhandledRejection; - vm.onUnhandledRejectionCtx = this.ctx; - vm.unhandled_error_counter = this.count; - } - }; - - pub fn onQuietUnhandledRejectionHandler(this: *VirtualMachine, _: *JSGlobalObject, _: JSValue) void { - this.unhandled_error_counter += 1; - } - - pub fn onQuietUnhandledRejectionHandlerCaptureValue(this: *VirtualMachine, _: *JSGlobalObject, value: JSValue) void { - this.unhandled_error_counter += 1; - value.ensureStillAlive(); - if (this.unhandled_pending_rejection_to_capture) |ptr| { - ptr.* = value; - } - } - - pub fn unhandledRejectionScope(this: *VirtualMachine) UnhandledRejectionScope { - return .{ - .onUnhandledRejection = this.onUnhandledRejection, - .ctx = this.onUnhandledRejectionCtx, - .count = this.unhandled_error_counter, - }; - } - - fn ensureSourceCodePrinter(this: *VirtualMachine) void { - if (source_code_printer == null) { - const allocator = if (bun.heap_breakdown.enabled) bun.heap_breakdown.namedAllocator("SourceCode") else this.allocator; - const writer = js_printer.BufferWriter.init(allocator); - source_code_printer = allocator.create(js_printer.BufferPrinter) catch unreachable; - source_code_printer.?.* = js_printer.BufferPrinter.init(writer); - source_code_printer.?.ctx.append_null_byte = false; - } - } - - pub fn loadExtraEnvAndSourceCodePrinter(this: *VirtualMachine) void { - var map = this.transpiler.env.map; - - ensureSourceCodePrinter(this); - - if (map.get("BUN_SHOW_BUN_STACKFRAMES") != null) { - this.hide_bun_stackframes = false; - } - - if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_ASYNC_TRANSPILER")) { - this.transpiler_store.enabled = false; - } - - if (map.map.fetchSwapRemove("NODE_CHANNEL_FD")) |kv| { - const fd_s = kv.value.value; - const mode = if (map.map.fetchSwapRemove("NODE_CHANNEL_SERIALIZATION_MODE")) |mode_kv| - IPC.Mode.fromString(mode_kv.value.value) orelse .json - else - .json; - - IPC.log("IPC environment variables: NODE_CHANNEL_FD={s}, NODE_CHANNEL_SERIALIZATION_MODE={s}", .{ fd_s, 
@tagName(mode) }); - if (std.fmt.parseInt(u31, fd_s, 10)) |fd| { - this.initIPCInstance(.fromUV(fd), mode); - } else |_| { - Output.warn("Failed to parse IPC channel number '{s}'", .{fd_s}); - } - } - - // Node.js checks if this are set to "1" and no other value - if (map.get("NODE_PRESERVE_SYMLINKS")) |value| { - this.transpiler.resolver.opts.preserve_symlinks = bun.strings.eqlComptime(value, "1"); - } - - if (map.get("BUN_GARBAGE_COLLECTOR_LEVEL")) |gc_level| { - // Reuse this flag for other things to avoid unnecessary hashtable - // lookups on start for obscure flags which we do not want others to - // depend on. - if (map.get("BUN_FEATURE_FLAG_FORCE_WAITER_THREAD") != null) { - bun.spawn.process.WaiterThread.setShouldUseWaiterThread(); - } - - // Only allowed for testing - if (map.get("BUN_FEATURE_FLAG_INTERNAL_FOR_TESTING") != null) { - ModuleLoader.is_allowed_to_use_internal_testing_apis = true; - } - - if (strings.eqlComptime(gc_level, "1")) { - this.aggressive_garbage_collection = .mild; - has_bun_garbage_collector_flag_enabled = true; - } else if (strings.eqlComptime(gc_level, "2")) { - this.aggressive_garbage_collection = .aggressive; - has_bun_garbage_collector_flag_enabled = true; - } - - if (map.get("BUN_FEATURE_FLAG_SYNTHETIC_MEMORY_LIMIT")) |value| { - if (std.fmt.parseInt(usize, value, 10)) |limit| { - synthetic_allocation_limit = limit; - string_allocation_limit = limit; - } else |_| { - Output.panic("BUN_FEATURE_FLAG_SYNTHETIC_MEMORY_LIMIT must be a positive integer", .{}); - } - } - } - } - - extern fn Bun__handleUncaughtException(*JSGlobalObject, err: JSValue, is_rejection: c_int) c_int; - extern fn Bun__handleUnhandledRejection(*JSGlobalObject, reason: JSValue, promise: JSValue) c_int; - - export fn Bun__VirtualMachine__exitDuringUncaughtException(this: *JSC.VirtualMachine) void { - this.exit_on_uncaught_exception = true; - } - - pub fn unhandledRejection(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, reason: JSValue, promise: JSValue) bool { - if (this.isShuttingDown()) { - Output.debugWarn("unhandledRejection during shutdown.", .{}); - return true; - } - - if (isBunTest) { - this.unhandled_error_counter += 1; - this.onUnhandledRejection(this, globalObject, reason); - return true; - } - - const handled = Bun__handleUnhandledRejection(globalObject, reason, promise) > 0; - if (!handled) { - this.unhandled_error_counter += 1; - this.onUnhandledRejection(this, globalObject, reason); - } - return handled; - } - - pub fn uncaughtException(this: *JSC.VirtualMachine, globalObject: *JSGlobalObject, err: JSValue, is_rejection: bool) bool { - if (this.isShuttingDown()) { - Output.debugWarn("uncaughtException during shutdown.", .{}); - return true; - } - - if (isBunTest) { - this.unhandled_error_counter += 1; - this.onUnhandledRejection(this, globalObject, err); - return true; - } - - if (this.is_handling_uncaught_exception) { - this.runErrorHandler(err, null); - JSC.Process.exit(globalObject, 7); - @panic("Uncaught exception while handling uncaught exception"); - } - if (this.exit_on_uncaught_exception) { - this.runErrorHandler(err, null); - JSC.Process.exit(globalObject, 1); - @panic("made it past Bun__Process__exit"); - } - this.is_handling_uncaught_exception = true; - defer this.is_handling_uncaught_exception = false; - const handled = Bun__handleUncaughtException(globalObject, err.toError() orelse err, if (is_rejection) 1 else 0) > 0; - if (!handled) { - // TODO maybe we want a separate code path for uncaught exceptions - this.unhandled_error_counter += 1; - 
this.exit_handler.exit_code = 1; - this.onUnhandledRejection(this, globalObject, err); - } - return handled; - } - - pub fn handlePendingInternalPromiseRejection(this: *JSC.VirtualMachine) void { - var promise = this.pending_internal_promise.?; - if (promise.status(this.global.vm()) == .rejected and !promise.isHandled(this.global.vm())) { - _ = this.unhandledRejection(this.global, promise.result(this.global.vm()), promise.asValue()); - promise.setHandled(this.global.vm()); - } - } - - pub fn defaultOnUnhandledRejection(this: *JSC.VirtualMachine, _: *JSGlobalObject, value: JSValue) void { - this.runErrorHandler(value, this.onUnhandledRejectionExceptionList); - } - - pub inline fn packageManager(this: *VirtualMachine) *PackageManager { - return this.transpiler.getPackageManager(); - } - - pub fn garbageCollect(this: *const VirtualMachine, sync: bool) usize { - @branchHint(.cold); - Global.mimalloc_cleanup(false); - if (sync) - return this.global.vm().runGC(true); - - this.global.vm().collectAsync(); - return this.global.vm().heapSize(); - } - - pub inline fn autoGarbageCollect(this: *const VirtualMachine) void { - if (this.aggressive_garbage_collection != .none) { - _ = this.garbageCollect(this.aggressive_garbage_collection == .aggressive); - } - } - - pub fn reload(this: *VirtualMachine, _: *HotReloader.HotReloadTask) void { - Output.debug("Reloading...", .{}); - const should_clear_terminal = !this.transpiler.env.hasSetNoClearTerminalOnReload(!Output.enable_ansi_colors); - if (this.hot_reload == .watch) { - Output.flush(); - bun.reloadProcess( - bun.default_allocator, - should_clear_terminal, - false, - ); - } - - if (should_clear_terminal) { - Output.flush(); - Output.disableBuffering(); - Output.resetTerminalAll(); - Output.enableBuffering(); - } - - this.global.reload(); - this.pending_internal_promise = this.reloadEntryPoint(this.main) catch @panic("Failed to reload"); - } - - pub inline fn nodeFS(this: *VirtualMachine) *Node.NodeFS { - return this.node_fs orelse brk: { - this.node_fs = bun.default_allocator.create(Node.NodeFS) catch unreachable; - this.node_fs.?.* = Node.NodeFS{ - // only used when standalone module graph is enabled - .vm = if (this.standalone_module_graph != null) this else null, - }; - break :brk this.node_fs.?; - }; - } - - pub inline fn rareData(this: *VirtualMachine) *JSC.RareData { - return this.rare_data orelse brk: { - this.rare_data = this.allocator.create(JSC.RareData) catch unreachable; - this.rare_data.?.* = .{}; - break :brk this.rare_data.?; - }; - } - - pub inline fn eventLoop(this: *VirtualMachine) *EventLoop { - return this.event_loop; - } - - pub fn prepareLoop(_: *VirtualMachine) void {} - - pub fn enterUWSLoop(this: *VirtualMachine) void { - var loop = this.event_loop_handle.?; - loop.run(); - } - - pub fn onBeforeExit(this: *VirtualMachine) void { - this.exit_handler.dispatchOnBeforeExit(); - var dispatch = false; - while (true) { - while (this.isEventLoopAlive()) : (dispatch = true) { - this.tick(); - this.eventLoop().autoTickActive(); - } - - if (dispatch) { - this.exit_handler.dispatchOnBeforeExit(); - dispatch = false; - - if (this.isEventLoopAlive()) continue; - } - - break; - } - } - - pub fn scriptExecutionStatus(this: *const VirtualMachine) callconv(.C) JSC.ScriptExecutionStatus { - if (this.is_shutting_down) { - return .stopped; - } - - if (this.worker) |worker| { - if (worker.hasRequestedTerminate()) { - return .stopped; - } - } - - return .running; - } - - pub fn specifierIsEvalEntryPoint(this: *VirtualMachine, specifier: JSValue) 
callconv(.C) bool { - if (this.module_loader.eval_source) |eval_source| { - var specifier_str = specifier.toBunString(this.global) catch @panic("unexpected exception"); - defer specifier_str.deref(); - return specifier_str.eqlUTF8(eval_source.path.text); - } - - return false; - } - - pub fn setEntryPointEvalResultESM(this: *VirtualMachine, result: JSValue) callconv(.C) void { - // allow esm evaluate to set value multiple times - if (!this.entry_point_result.cjs_set_value) { - this.entry_point_result.value.set(this.global, result); - } - } - - pub fn setEntryPointEvalResultCJS(this: *VirtualMachine, value: JSValue) callconv(.C) void { - if (!this.entry_point_result.value.has()) { - this.entry_point_result.value.set(this.global, value); - this.entry_point_result.cjs_set_value = true; - } - } - - comptime { - @export(&scriptExecutionStatus, .{ .name = "Bun__VM__scriptExecutionStatus" }); - @export(&setEntryPointEvalResultESM, .{ .name = "Bun__VM__setEntryPointEvalResultESM" }); - @export(&setEntryPointEvalResultCJS, .{ .name = "Bun__VM__setEntryPointEvalResultCJS" }); - @export(&specifierIsEvalEntryPoint, .{ .name = "Bun__VM__specifierIsEvalEntryPoint" }); - } - - pub fn onExit(this: *VirtualMachine) void { - this.exit_handler.dispatchOnExit(); - this.is_shutting_down = true; - - const rare_data = this.rare_data orelse return; - defer rare_data.cleanup_hooks.clearAndFree(bun.default_allocator); - // Make sure we run new cleanup hooks introduced by running cleanup hooks - while (rare_data.cleanup_hooks.items.len > 0) { - var hooks = rare_data.cleanup_hooks; - defer hooks.deinit(bun.default_allocator); - rare_data.cleanup_hooks = .{}; - for (hooks.items) |hook| { - hook.execute(); - } - } - } - - extern fn Zig__GlobalObject__destructOnExit(*JSGlobalObject) void; - - pub fn globalExit(this: *VirtualMachine) noreturn { - if (this.destruct_main_thread_on_exit and this.is_main_thread) { - Zig__GlobalObject__destructOnExit(this.global); - this.deinit(); - } - bun.Global.exit(this.exit_handler.exit_code); - } - - pub fn nextAsyncTaskID(this: *VirtualMachine) u64 { - var debugger: *Debugger = &(this.debugger orelse return 0); - debugger.next_debugger_id +%= 1; - return debugger.next_debugger_id; - } - - pub fn hotMap(this: *VirtualMachine) ?*JSC.RareData.HotMap { - if (this.hot_reload != .hot) { - return null; - } - - return this.rareData().hotMap(this.allocator); - } - - pub var has_created_debugger: bool = false; - - pub const TestReporterAgent = struct { - handle: ?*Handle = null, - const debug = Output.scoped(.TestReporterAgent, false); - pub const TestStatus = enum(u8) { - pass, - fail, - timeout, - skip, - todo, - }; - pub const Handle = opaque { - extern "c" fn Bun__TestReporterAgentReportTestFound(agent: *Handle, callFrame: *JSC.CallFrame, testId: c_int, name: *String) void; - extern "c" fn Bun__TestReporterAgentReportTestStart(agent: *Handle, testId: c_int) void; - extern "c" fn Bun__TestReporterAgentReportTestEnd(agent: *Handle, testId: c_int, bunTestStatus: TestStatus, elapsed: f64) void; - - pub fn reportTestFound(this: *Handle, callFrame: *JSC.CallFrame, testId: i32, name: *String) void { - Bun__TestReporterAgentReportTestFound(this, callFrame, testId, name); - } - - pub fn reportTestStart(this: *Handle, testId: c_int) void { - Bun__TestReporterAgentReportTestStart(this, testId); - } - - pub fn reportTestEnd(this: *Handle, testId: c_int, bunTestStatus: TestStatus, elapsed: f64) void { - Bun__TestReporterAgentReportTestEnd(this, testId, bunTestStatus, elapsed); - } - }; - pub export fn 
Bun__TestReporterAgentEnable(agent: *Handle) void { - if (JSC.VirtualMachine.get().debugger) |*debugger| { - debug("enable", .{}); - debugger.test_reporter_agent.handle = agent; - } - } - pub export fn Bun__TestReporterAgentDisable(agent: *Handle) void { - _ = agent; // autofix - if (JSC.VirtualMachine.get().debugger) |*debugger| { - debug("disable", .{}); - debugger.test_reporter_agent.handle = null; - } - } - - /// Caller must ensure that it is enabled first. - /// - /// Since we may have to call .deinit on the name string. - pub fn reportTestFound(this: TestReporterAgent, callFrame: *JSC.CallFrame, test_id: i32, name: *bun.String) void { - debug("reportTestFound", .{}); - - this.handle.?.reportTestFound(callFrame, test_id, name); - } - - /// Caller must ensure that it is enabled first. - pub fn reportTestStart(this: TestReporterAgent, test_id: i32) void { - debug("reportTestStart", .{}); - this.handle.?.reportTestStart(test_id); - } - - /// Caller must ensure that it is enabled first. - pub fn reportTestEnd(this: TestReporterAgent, test_id: i32, bunTestStatus: TestStatus, elapsed: f64) void { - debug("reportTestEnd", .{}); - this.handle.?.reportTestEnd(test_id, bunTestStatus, elapsed); - } - - pub fn isEnabled(this: TestReporterAgent) bool { - return this.handle != null; - } - }; - - pub const LifecycleAgent = struct { - handle: ?*Handle = null, - const debug = Output.scoped(.LifecycleAgent, false); - - pub const Handle = opaque { - extern "c" fn Bun__LifecycleAgentReportReload(agent: *Handle) void; - extern "c" fn Bun__LifecycleAgentReportError(agent: *Handle, exception: *ZigException) void; - extern "c" fn Bun__LifecycleAgentPreventExit(agent: *Handle) void; - extern "c" fn Bun__LifecycleAgentStopPreventingExit(agent: *Handle) void; - - pub fn preventExit(this: *Handle) void { - Bun__LifecycleAgentPreventExit(this); - } - - pub fn stopPreventingExit(this: *Handle) void { - Bun__LifecycleAgentStopPreventingExit(this); - } - - pub fn reportReload(this: *Handle) void { - debug("reportReload", .{}); - Bun__LifecycleAgentReportReload(this); - } - - pub fn reportError(this: *Handle, exception: *ZigException) void { - debug("reportError", .{}); - Bun__LifecycleAgentReportError(this, exception); - } - }; - - pub export fn Bun__LifecycleAgentEnable(agent: *Handle) void { - if (JSC.VirtualMachine.get().debugger) |*debugger| { - debug("enable", .{}); - debugger.lifecycle_reporter_agent.handle = agent; - } - } - - pub export fn Bun__LifecycleAgentDisable(agent: *Handle) void { - _ = agent; // autofix - if (JSC.VirtualMachine.get().debugger) |*debugger| { - debug("disable", .{}); - debugger.lifecycle_reporter_agent.handle = null; - } - } - - pub fn reportReload(this: *LifecycleAgent) void { - if (this.handle) |handle| { - handle.reportReload(); - } - } - - pub fn reportError(this: *LifecycleAgent, exception: *ZigException) void { - if (this.handle) |handle| { - handle.reportError(exception); - } - } - - pub fn isEnabled(this: *const LifecycleAgent) bool { - return this.handle != null; - } - }; - - pub const Debugger = struct { - path_or_port: ?[]const u8 = null, - from_environment_variable: []const u8 = "", - script_execution_context_id: u32 = 0, - next_debugger_id: u64 = 1, - poll_ref: Async.KeepAlive = .{}, - wait_for_connection: Wait = .off, - // wait_for_connection: bool = false, - set_breakpoint_on_first_line: bool = false, - mode: enum { - /// Bun acts as the server. https://debug.bun.sh/ uses this - listen, - /// Bun connects to this path. The VSCode extension uses this. 
- connect, - } = .listen, - - test_reporter_agent: TestReporterAgent = .{}, - lifecycle_reporter_agent: LifecycleAgent = .{}, - must_block_until_connected: bool = false, - - pub const Wait = enum { off, shortly, forever }; - - pub const log = Output.scoped(.debugger, false); - - extern "c" fn Bun__createJSDebugger(*JSGlobalObject) u32; - extern "c" fn Bun__ensureDebugger(u32, bool) void; - extern "c" fn Bun__startJSDebuggerThread(*JSGlobalObject, u32, *bun.String, c_int, bool) void; - var futex_atomic: std.atomic.Value(u32) = undefined; - - pub fn waitForDebuggerIfNecessary(this: *VirtualMachine) void { - const debugger = &(this.debugger orelse return); - if (!debugger.must_block_until_connected) { - return; - } - defer debugger.must_block_until_connected = false; - - Debugger.log("spin", .{}); - while (futex_atomic.load(.monotonic) > 0) { - bun.Futex.waitForever(&futex_atomic, 1); - } - if (comptime Environment.enable_logs) - Debugger.log("waitForDebugger: {}", .{Output.ElapsedFormatter{ - .colors = Output.enable_ansi_colors_stderr, - .duration_ns = @truncate(@as(u128, @intCast(std.time.nanoTimestamp() - bun.CLI.start_time))), - }}); - - Bun__ensureDebugger(debugger.script_execution_context_id, debugger.wait_for_connection != .off); - - // Sleep up to 30ms for automatic inspection. - const wait_for_connection_delay_ms = 30; - - var deadline: bun.timespec = if (debugger.wait_for_connection == .shortly) bun.timespec.now().addMs(wait_for_connection_delay_ms) else undefined; - - if (comptime Environment.isWindows) { - // TODO: remove this when tickWithTimeout actually works properly on Windows. - if (debugger.wait_for_connection == .shortly) { - uv.uv_update_time(this.uvLoop()); - var timer = bun.default_allocator.create(uv.Timer) catch bun.outOfMemory(); - timer.* = std.mem.zeroes(uv.Timer); - timer.init(this.uvLoop()); - const onDebuggerTimer = struct { - fn call(handle: *uv.Timer) callconv(.C) void { - const vm = JSC.VirtualMachine.get(); - vm.debugger.?.poll_ref.unref(vm); - uv.uv_close(@ptrCast(handle), deinitTimer); - } - - fn deinitTimer(handle: *anyopaque) callconv(.C) void { - bun.default_allocator.destroy(@as(*uv.Timer, @alignCast(@ptrCast(handle)))); - } - }.call; - timer.start(wait_for_connection_delay_ms, 0, &onDebuggerTimer); - timer.ref(); - } - } - - while (debugger.wait_for_connection != .off) { - this.eventLoop().tick(); - switch (debugger.wait_for_connection) { - .forever => { - this.eventLoop().autoTickActive(); - - if (comptime Environment.enable_logs) - log("waited: {}", .{std.fmt.fmtDuration(@intCast(@as(i64, @truncate(std.time.nanoTimestamp() - bun.CLI.start_time))))}); - }, - .shortly => { - // Handle .incrementRefConcurrently - if (comptime Environment.isPosix) { - const pending_unref = this.pending_unref_counter; - if (pending_unref > 0) { - this.pending_unref_counter = 0; - this.uwsLoop().unrefCount(pending_unref); - } - } - - this.uwsLoop().tickWithTimeout(&deadline); - - if (comptime Environment.enable_logs) - log("waited: {}", .{std.fmt.fmtDuration(@intCast(@as(i64, @truncate(std.time.nanoTimestamp() - bun.CLI.start_time))))}); - - const elapsed = bun.timespec.now(); - if (elapsed.order(&deadline) != .lt) { - debugger.poll_ref.unref(this); - log("Timed out waiting for the debugger", .{}); - break; - } - }, - .off => { - break; - }, - } - } - } - - pub fn create(this: *VirtualMachine, globalObject: *JSGlobalObject) !void { - log("create", .{}); - JSC.markBinding(@src()); - if (!has_created_debugger) { - has_created_debugger = true; - 
std.mem.doNotOptimizeAway(&TestReporterAgent.Bun__TestReporterAgentDisable); - std.mem.doNotOptimizeAway(&LifecycleAgent.Bun__LifecycleAgentDisable); - std.mem.doNotOptimizeAway(&TestReporterAgent.Bun__TestReporterAgentEnable); - std.mem.doNotOptimizeAway(&LifecycleAgent.Bun__LifecycleAgentEnable); - var debugger = &this.debugger.?; - debugger.script_execution_context_id = Bun__createJSDebugger(globalObject); - if (!this.has_started_debugger) { - this.has_started_debugger = true; - futex_atomic = std.atomic.Value(u32).init(0); - var thread = try std.Thread.spawn(.{}, startJSDebuggerThread, .{this}); - thread.detach(); - } - this.eventLoop().ensureWaker(); - - if (debugger.wait_for_connection != .off) { - debugger.poll_ref.ref(this); - debugger.must_block_until_connected = true; - } - } - } - - pub fn startJSDebuggerThread(other_vm: *VirtualMachine) void { - var arena = bun.MimallocArena.init() catch unreachable; - Output.Source.configureNamedThread("Debugger"); - log("startJSDebuggerThread", .{}); - JSC.markBinding(@src()); - - var vm = JSC.VirtualMachine.init(.{ - .allocator = arena.allocator(), - .args = std.mem.zeroes(Api.TransformOptions), - .store_fd = false, - }) catch @panic("Failed to create Debugger VM"); - vm.allocator = arena.allocator(); - vm.arena = &arena; - - vm.transpiler.configureDefines() catch @panic("Failed to configure defines"); - vm.is_main_thread = false; - vm.eventLoop().ensureWaker(); - - const callback = OpaqueWrap(VirtualMachine, start); - vm.global.vm().holdAPILock(other_vm, callback); - } - - pub export fn Debugger__didConnect() void { - var this = VirtualMachine.get(); - if (this.debugger.?.wait_for_connection != .off) { - this.debugger.?.wait_for_connection = .off; - this.debugger.?.poll_ref.unref(this); - } - } - - fn start(other_vm: *VirtualMachine) void { - JSC.markBinding(@src()); - - var this = VirtualMachine.get(); - const debugger = other_vm.debugger.?; - const loop = this.eventLoop(); - - if (debugger.from_environment_variable.len > 0) { - var url = bun.String.createUTF8(debugger.from_environment_variable); - - loop.enter(); - defer loop.exit(); - Bun__startJSDebuggerThread(this.global, debugger.script_execution_context_id, &url, 1, debugger.mode == .connect); - } - - if (debugger.path_or_port) |path_or_port| { - var url = bun.String.createUTF8(path_or_port); - - loop.enter(); - defer loop.exit(); - Bun__startJSDebuggerThread(this.global, debugger.script_execution_context_id, &url, 0, debugger.mode == .connect); - } - - this.global.handleRejectedPromises(); - - if (this.log.msgs.items.len > 0) { - this.log.print(Output.errorWriter()) catch {}; - Output.prettyErrorln("\n", .{}); - Output.flush(); - } - - log("wake", .{}); - futex_atomic.store(0, .monotonic); - bun.Futex.wake(&futex_atomic, 1); - - other_vm.eventLoop().wakeup(); - - this.eventLoop().tick(); - - other_vm.eventLoop().wakeup(); - - while (true) { - while (this.isEventLoopAlive()) { - this.tick(); - this.eventLoop().autoTickActive(); - } - - this.eventLoop().tickPossiblyForever(); - } - } - }; - - pub inline fn enqueueTask(this: *VirtualMachine, task: Task) void { - this.eventLoop().enqueueTask(task); - } - - pub inline fn enqueueImmediateTask(this: *VirtualMachine, task: *JSC.BunTimer.ImmediateObject) void { - this.eventLoop().enqueueImmediateTask(task); - } - - pub inline fn enqueueTaskConcurrent(this: *VirtualMachine, task: *JSC.ConcurrentTask) void { - this.eventLoop().enqueueTaskConcurrent(task); - } - - pub fn tick(this: *VirtualMachine) void { - this.eventLoop().tick(); - } - - pub 
fn waitFor(this: *VirtualMachine, cond: *bool) void { - while (!cond.*) { - this.eventLoop().tick(); - - if (!cond.*) { - this.eventLoop().autoTick(); - } - } - } - - pub fn waitForPromise(this: *VirtualMachine, promise: JSC.AnyPromise) void { - this.eventLoop().waitForPromise(promise); - } - - pub fn waitForTasks(this: *VirtualMachine) void { - this.eventLoop().waitForTasks(); - } - - pub const MacroMap = std.AutoArrayHashMap(i32, bun.JSC.C.JSObjectRef); - - pub fn enableMacroMode(this: *VirtualMachine) void { - JSC.markBinding(@src()); - - if (!this.has_enabled_macro_mode) { - this.has_enabled_macro_mode = true; - this.macro_event_loop.tasks = EventLoop.Queue.init(default_allocator); - this.macro_event_loop.tasks.ensureTotalCapacity(16) catch unreachable; - this.macro_event_loop.global = this.global; - this.macro_event_loop.virtual_machine = this; - this.macro_event_loop.concurrent_tasks = .{}; - ensureSourceCodePrinter(this); - } - - this.transpiler.options.target = .bun_macro; - this.transpiler.resolver.caches.fs.use_alternate_source_cache = true; - this.macro_mode = true; - this.event_loop = &this.macro_event_loop; - Analytics.Features.macros += 1; - this.transpiler_store.enabled = false; - } - - pub fn disableMacroMode(this: *VirtualMachine) void { - this.transpiler.options.target = .bun; - this.transpiler.resolver.caches.fs.use_alternate_source_cache = false; - this.macro_mode = false; - this.event_loop = &this.regular_event_loop; - this.transpiler_store.enabled = true; - } - - pub fn isWatcherEnabled(this: *VirtualMachine) bool { - return this.bun_watcher != .none; - } - - /// Instead of storing timestamp as a i128, we store it as a u64. - /// We subtract the timestamp from Jan 1, 2000 (Y2K) - pub const origin_relative_epoch = 946684800 * std.time.ns_per_s; - fn getOriginTimestamp() u64 { - return @as( - u64, - @truncate(@as( - u128, - // handle if they set their system clock to be before epoch - @intCast(@max( - std.time.nanoTimestamp(), - origin_relative_epoch, - )), - ) - origin_relative_epoch), - ); - } - - pub inline fn isLoaded() bool { - return VMHolder.vm != null; - } - const RuntimeTranspilerStore = JSC.RuntimeTranspilerStore; - pub fn initWithModuleGraph( - opts: Options, - ) !*VirtualMachine { - JSC.markBinding(@src()); - const allocator = opts.allocator; - VMHolder.vm = try allocator.create(VirtualMachine); - const console = try allocator.create(ConsoleObject); - console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); - const log = opts.log.?; - const transpiler = try Transpiler.init( - allocator, - log, - opts.args, - null, - ); - var vm = VMHolder.vm.?; - - vm.* = VirtualMachine{ - .global = undefined, - .transpiler_store = RuntimeTranspilerStore.init(), - .allocator = allocator, - .entry_point = ServerEntryPoint{}, - .transpiler = transpiler, - .console = console, - .log = log, - .timer = JSC.BunTimer.All.init(), - .origin = transpiler.options.origin, - .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), - .source_mappings = undefined, - .macros = MacroMap.init(allocator), - .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), - .origin_timer = std.time.Timer.start() catch @panic("Timers are not supported on this system."), - .origin_timestamp = getOriginTimestamp(), - .ref_strings = JSC.RefString.Map.init(allocator), - .ref_strings_mutex = .{}, - .standalone_module_graph = opts.graph.?, - .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), - .destruct_main_thread_on_exit = 
opts.destruct_main_thread_on_exit, - }; - vm.source_mappings.init(&vm.saved_source_map_table); - vm.regular_event_loop.tasks = EventLoop.Queue.init( - default_allocator, - ); - vm.regular_event_loop.virtual_machine = vm; - vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; - vm.regular_event_loop.concurrent_tasks = .{}; - vm.event_loop = &vm.regular_event_loop; - - vm.transpiler.macro_context = null; - vm.transpiler.resolver.store_fd = false; - vm.transpiler.resolver.prefer_module_field = false; - - vm.transpiler.resolver.onWakePackageManager = .{ - .context = &vm.modules, - .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, - .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, - }; - - vm.transpiler.resolver.standalone_module_graph = opts.graph.?; - - // Avoid reading from tsconfig.json & package.json when we're in standalone mode - vm.transpiler.configureLinkerWithAutoJSX(false); - - vm.transpiler.macro_context = js_ast.Macro.MacroContext.init(&vm.transpiler); - if (opts.is_main_thread) { - VMHolder.main_thread_vm = vm; - } - vm.global = JSGlobalObject.create( - vm, - vm.console, - if (opts.is_main_thread) 1 else std.math.maxInt(i32), - false, - false, - null, - ); - vm.regular_event_loop.global = vm.global; - vm.jsc = vm.global.vm(); - uws.Loop.get().internal_loop_data.jsc_vm = vm.jsc; - - vm.configureDebugger(opts.debugger); - vm.body_value_hive_allocator = BodyValueHiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); - - return vm; - } - - export fn Bun__isMainThreadVM() callconv(.C) bool { - return get().is_main_thread; - } - - pub const Options = struct { - allocator: std.mem.Allocator, - args: Api.TransformOptions, - log: ?*logger.Log = null, - env_loader: ?*DotEnv.Loader = null, - store_fd: bool = false, - smol: bool = false, - dns_result_order: DNSResolver.Order = .verbatim, - - // --print needs the result from evaluating the main module - eval: bool = false, - - graph: ?*bun.StandaloneModuleGraph = null, - debugger: bun.CLI.Command.Debugger = .{ .unspecified = {} }, - is_main_thread: bool = false, - /// Whether this VM should be destroyed after it exits, even if it is the main thread's VM. - /// Worker VMs are always destroyed on exit, regardless of this setting. Setting this to - /// true may expose bugs that would otherwise only occur using Workers. 
- destruct_main_thread_on_exit: bool = false, - }; - - pub var is_smol_mode = false; - - pub fn init(opts: Options) !*VirtualMachine { - JSC.markBinding(@src()); - const allocator = opts.allocator; - var log: *logger.Log = undefined; - if (opts.log) |__log| { - log = __log; - } else { - log = try allocator.create(logger.Log); - log.* = logger.Log.init(allocator); - } - - VMHolder.vm = try allocator.create(VirtualMachine); - const console = try allocator.create(ConsoleObject); - console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); - const transpiler = try Transpiler.init( - allocator, - log, - try Config.configureTransformOptionsForBunVM(allocator, opts.args), - opts.env_loader, - ); - var vm = VMHolder.vm.?; - if (opts.is_main_thread) { - VMHolder.main_thread_vm = vm; - } - vm.* = VirtualMachine{ - .global = undefined, - .transpiler_store = RuntimeTranspilerStore.init(), - .allocator = allocator, - .entry_point = ServerEntryPoint{}, - .transpiler = transpiler, - .console = console, - .log = log, - - .timer = JSC.BunTimer.All.init(), - - .origin = transpiler.options.origin, - - .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), - .source_mappings = undefined, - .macros = MacroMap.init(allocator), - .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), - .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), - .origin_timestamp = getOriginTimestamp(), - .ref_strings = JSC.RefString.Map.init(allocator), - .ref_strings_mutex = .{}, - .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), - .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, - }; - vm.source_mappings.init(&vm.saved_source_map_table); - vm.regular_event_loop.tasks = EventLoop.Queue.init( - default_allocator, - ); - - vm.regular_event_loop.virtual_machine = vm; - vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; - vm.regular_event_loop.concurrent_tasks = .{}; - vm.event_loop = &vm.regular_event_loop; - - vm.transpiler.macro_context = null; - vm.transpiler.resolver.store_fd = opts.store_fd; - vm.transpiler.resolver.prefer_module_field = false; - vm.transpiler.resolver.opts.preserve_symlinks = opts.args.preserve_symlinks orelse false; - - vm.transpiler.resolver.onWakePackageManager = .{ - .context = &vm.modules, - .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, - .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, - }; - - vm.transpiler.configureLinker(); - - vm.transpiler.macro_context = js_ast.Macro.MacroContext.init(&vm.transpiler); - - vm.global = JSGlobalObject.create( - vm, - vm.console, - if (opts.is_main_thread) 1 else std.math.maxInt(i32), - opts.smol, - opts.eval, - null, - ); - vm.regular_event_loop.global = vm.global; - vm.jsc = vm.global.vm(); - uws.Loop.get().internal_loop_data.jsc_vm = vm.jsc; - vm.smol = opts.smol; - vm.dns_result_order = opts.dns_result_order; - - if (opts.smol) - is_smol_mode = opts.smol; - - vm.configureDebugger(opts.debugger); - vm.body_value_hive_allocator = BodyValueHiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); - - return vm; - } - - pub inline fn assertOnJSThread(vm: *const VirtualMachine) void { - if (Environment.allow_assert) { - if (vm.debug_thread_id != std.Thread.getCurrentId()) { - std.debug.panic("Expected to be on the JS thread.", .{}); - } - } - } - - fn configureDebugger(this: *VirtualMachine, cli_flag: bun.CLI.Command.Debugger) void { - if 
(bun.getenvZ("HYPERFINE_RANDOMIZED_ENVIRONMENT_OFFSET") != null) { - return; - } - - const unix = bun.getenvZ("BUN_INSPECT") orelse ""; - const connect_to = bun.getenvZ("BUN_INSPECT_CONNECT_TO") orelse ""; - - const set_breakpoint_on_first_line = unix.len > 0 and strings.endsWith(unix, "?break=1"); // If we should set a breakpoint on the first line - const wait_for_debugger = unix.len > 0 and strings.endsWith(unix, "?wait=1"); // If we should wait for the debugger to connect before starting the event loop - - const wait_for_connection: Debugger.Wait = if (set_breakpoint_on_first_line or wait_for_debugger) .forever else .off; - - switch (cli_flag) { - .unspecified => { - if (unix.len > 0) { - this.debugger = Debugger{ - .path_or_port = null, - .from_environment_variable = unix, - .wait_for_connection = wait_for_connection, - .set_breakpoint_on_first_line = set_breakpoint_on_first_line, - }; - } else if (connect_to.len > 0) { - // This works in the vscode debug terminal because that relies on unix or notify being set, which they - // are in the debug terminal. This branch doesn't reach - this.debugger = Debugger{ - .path_or_port = null, - .from_environment_variable = connect_to, - .wait_for_connection = .off, - .set_breakpoint_on_first_line = false, - .mode = .connect, - }; - } - }, - .enable => { - this.debugger = Debugger{ - .path_or_port = cli_flag.enable.path_or_port, - .from_environment_variable = unix, - .wait_for_connection = if (cli_flag.enable.wait_for_connection) .forever else wait_for_connection, - .set_breakpoint_on_first_line = set_breakpoint_on_first_line or cli_flag.enable.set_breakpoint_on_first_line, - }; - }, - } - - if (this.isInspectorEnabled() and this.debugger.?.mode != .connect) { - this.transpiler.options.minify_identifiers = false; - this.transpiler.options.minify_syntax = false; - this.transpiler.options.minify_whitespace = false; - this.transpiler.options.debugger = true; - } - } - - pub fn initWorker( - worker: *WebWorker, - opts: Options, - ) anyerror!*VirtualMachine { - JSC.markBinding(@src()); - var log: *logger.Log = undefined; - const allocator = opts.allocator; - if (opts.log) |__log| { - log = __log; - } else { - log = try allocator.create(logger.Log); - log.* = logger.Log.init(allocator); - } - - VMHolder.vm = try allocator.create(VirtualMachine); - const console = try allocator.create(ConsoleObject); - console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); - const transpiler = try Transpiler.init( - allocator, - log, - try Config.configureTransformOptionsForBunVM(allocator, opts.args), - opts.env_loader, - ); - var vm = VMHolder.vm.?; - - vm.* = VirtualMachine{ - .global = undefined, - .allocator = allocator, - .transpiler_store = RuntimeTranspilerStore.init(), - .entry_point = ServerEntryPoint{}, - .transpiler = transpiler, - .console = console, - .log = log, - - .timer = JSC.BunTimer.All.init(), - .origin = transpiler.options.origin, - - .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), - .source_mappings = undefined, - .macros = MacroMap.init(allocator), - .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), - .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), - .origin_timestamp = getOriginTimestamp(), - .ref_strings = JSC.RefString.Map.init(allocator), - .ref_strings_mutex = .{}, - .standalone_module_graph = worker.parent.standalone_module_graph, - .worker = worker, - .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), - // This 
option is irrelevant for Workers - .destruct_main_thread_on_exit = false, - }; - vm.source_mappings.init(&vm.saved_source_map_table); - vm.regular_event_loop.tasks = EventLoop.Queue.init( - default_allocator, - ); - - vm.regular_event_loop.virtual_machine = vm; - vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; - vm.regular_event_loop.concurrent_tasks = .{}; - vm.event_loop = &vm.regular_event_loop; - vm.hot_reload = worker.parent.hot_reload; - vm.transpiler.macro_context = null; - vm.transpiler.resolver.store_fd = opts.store_fd; - vm.transpiler.resolver.prefer_module_field = false; - vm.transpiler.resolver.onWakePackageManager = .{ - .context = &vm.modules, - .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, - .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, - }; - vm.transpiler.resolver.standalone_module_graph = opts.graph; - - if (opts.graph == null) { - vm.transpiler.configureLinker(); - } else { - vm.transpiler.configureLinkerWithAutoJSX(false); - } - - vm.smol = opts.smol; - vm.transpiler.macro_context = js_ast.Macro.MacroContext.init(&vm.transpiler); - - vm.global = JSGlobalObject.create( - vm, - vm.console, - @as(i32, @intCast(worker.execution_context_id)), - worker.mini, - opts.eval, - worker.cpp_worker, - ); - vm.regular_event_loop.global = vm.global; - vm.jsc = vm.global.vm(); - uws.Loop.get().internal_loop_data.jsc_vm = vm.jsc; - vm.transpiler.setAllocator(allocator); - vm.body_value_hive_allocator = BodyValueHiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); - - return vm; - } - - pub fn initBake(opts: Options) anyerror!*VirtualMachine { - JSC.markBinding(@src()); - const allocator = opts.allocator; - var log: *logger.Log = undefined; - if (opts.log) |__log| { - log = __log; - } else { - log = try allocator.create(logger.Log); - log.* = logger.Log.init(allocator); - } - - VMHolder.vm = try allocator.create(VirtualMachine); - const console = try allocator.create(ConsoleObject); - console.* = ConsoleObject.init(Output.errorWriter(), Output.writer()); - const transpiler = try Transpiler.init( - allocator, - log, - try Config.configureTransformOptionsForBunVM(allocator, opts.args), - opts.env_loader, - ); - var vm = VMHolder.vm.?; - - vm.* = VirtualMachine{ - .global = undefined, - .transpiler_store = RuntimeTranspilerStore.init(), - .allocator = allocator, - .entry_point = ServerEntryPoint{}, - .transpiler = transpiler, - .console = console, - .log = log, - .timer = JSC.BunTimer.All.init(), - .origin = transpiler.options.origin, - .saved_source_map_table = SavedSourceMap.HashTable.init(bun.default_allocator), - .source_mappings = undefined, - .macros = MacroMap.init(allocator), - .macro_entry_points = @TypeOf(vm.macro_entry_points).init(allocator), - .origin_timer = std.time.Timer.start() catch @panic("Please don't mess with timers."), - .origin_timestamp = getOriginTimestamp(), - .ref_strings = JSC.RefString.Map.init(allocator), - .ref_strings_mutex = .{}, - .debug_thread_id = if (Environment.allow_assert) std.Thread.getCurrentId(), - .destruct_main_thread_on_exit = opts.destruct_main_thread_on_exit, - }; - vm.source_mappings.init(&vm.saved_source_map_table); - vm.regular_event_loop.tasks = EventLoop.Queue.init( - default_allocator, - ); - - vm.regular_event_loop.virtual_machine = vm; - vm.regular_event_loop.tasks.ensureUnusedCapacity(64) catch unreachable; - vm.regular_event_loop.concurrent_tasks = .{}; - vm.event_loop = &vm.regular_event_loop; - vm.eventLoop().ensureWaker(); - - vm.transpiler.macro_context = 
null; - vm.transpiler.resolver.store_fd = opts.store_fd; - vm.transpiler.resolver.prefer_module_field = false; - - vm.transpiler.resolver.onWakePackageManager = .{ - .context = &vm.modules, - .handler = ModuleLoader.AsyncModule.Queue.onWakeHandler, - .onDependencyError = ModuleLoader.AsyncModule.Queue.onDependencyError, - }; - - vm.transpiler.configureLinker(); - - vm.transpiler.macro_context = js_ast.Macro.MacroContext.init(&vm.transpiler); - - vm.smol = opts.smol; - - if (opts.smol) - is_smol_mode = opts.smol; - - vm.configureDebugger(opts.debugger); - vm.body_value_hive_allocator = BodyValueHiveAllocator.init(bun.typedAllocator(JSC.WebCore.Body.Value)); - - return vm; - } - - pub threadlocal var source_code_printer: ?*js_printer.BufferPrinter = null; - - pub fn clearRefString(_: *anyopaque, ref_string: *JSC.RefString) void { - _ = VirtualMachine.get().ref_strings.remove(ref_string.hash); - } - - pub fn refCountedResolvedSource(this: *VirtualMachine, code: []const u8, specifier: bun.String, source_url: []const u8, hash_: ?u32, comptime add_double_ref: bool) ResolvedSource { - // refCountedString will panic if the code is empty - if (code.len == 0) { - return ResolvedSource{ - .source_code = bun.String.init(""), - .specifier = specifier, - .source_url = specifier.createIfDifferent(source_url), - .allocator = null, - .source_code_needs_deref = false, - }; - } - var source = this.refCountedString(code, hash_, !add_double_ref); - if (add_double_ref) { - source.ref(); - source.ref(); - } - - return ResolvedSource{ - .source_code = bun.String.init(source.impl), - .specifier = specifier, - .source_url = specifier.createIfDifferent(source_url), - .allocator = source, - .source_code_needs_deref = false, - }; - } - - fn refCountedStringWithWasNew(this: *VirtualMachine, new: *bool, input_: []const u8, hash_: ?u32, comptime dupe: bool) *JSC.RefString { - JSC.markBinding(@src()); - bun.assert(input_.len > 0); - const hash = hash_ orelse JSC.RefString.computeHash(input_); - this.ref_strings_mutex.lock(); - defer this.ref_strings_mutex.unlock(); - - const entry = this.ref_strings.getOrPut(hash) catch unreachable; - if (!entry.found_existing) { - const input = if (comptime dupe) - (this.allocator.dupe(u8, input_) catch unreachable) - else - input_; - - const ref = this.allocator.create(JSC.RefString) catch unreachable; - ref.* = JSC.RefString{ - .allocator = this.allocator, - .ptr = input.ptr, - .len = input.len, - .impl = bun.String.createExternal(*JSC.RefString, input, true, ref, &freeRefString).value.WTFStringImpl, - .hash = hash, - .ctx = this, - .onBeforeDeinit = VirtualMachine.clearRefString, - }; - entry.value_ptr.* = ref; - } - new.* = !entry.found_existing; - return entry.value_ptr.*; - } - - fn freeRefString(str: *JSC.RefString, _: *anyopaque, _: u32) callconv(.C) void { - str.deinit(); - } - - pub fn refCountedString(this: *VirtualMachine, input_: []const u8, hash_: ?u32, comptime dupe: bool) *JSC.RefString { - bun.assert(input_.len > 0); - var _was_new = false; - return this.refCountedStringWithWasNew(&_was_new, input_, hash_, comptime dupe); - } - - pub fn fetchWithoutOnLoadPlugins( - jsc_vm: *VirtualMachine, - globalObject: *JSGlobalObject, - _specifier: String, - referrer: String, - log: *logger.Log, - comptime flags: FetchFlags, - ) anyerror!ResolvedSource { - bun.assert(VirtualMachine.isLoaded()); - - if (try ModuleLoader.fetchBuiltinModule(jsc_vm, _specifier)) |builtin| { - return builtin; - } - - const specifier_clone = _specifier.toUTF8(bun.default_allocator); - defer 
specifier_clone.deinit(); - const referrer_clone = referrer.toUTF8(bun.default_allocator); - defer referrer_clone.deinit(); - - var virtual_source_to_use: ?logger.Source = null; - var blob_to_deinit: ?JSC.WebCore.Blob = null; - defer if (blob_to_deinit) |*blob| blob.deinit(); - const lr = options.getLoaderAndVirtualSource(specifier_clone.slice(), jsc_vm, &virtual_source_to_use, &blob_to_deinit, null) catch { - return error.ModuleNotFound; - }; - const module_type: options.ModuleType = if (lr.package_json) |pkg| pkg.module_type else .unknown; - - // .print_source, which is used by exceptions avoids duplicating the entire source code - // but that means we have to be careful of the lifetime of the source code - // so we only want to reset the arena once its done freeing it. - defer if (flags != .print_source) jsc_vm.module_loader.resetArena(jsc_vm); - errdefer if (flags == .print_source) jsc_vm.module_loader.resetArena(jsc_vm); - - return try ModuleLoader.transpileSourceCode( - jsc_vm, - lr.specifier, - referrer_clone.slice(), - _specifier, - lr.path, - lr.loader orelse if (lr.is_main) .js else .file, - module_type, - log, - lr.virtual_source, - null, - VirtualMachine.source_code_printer.?, - globalObject, - flags, - ); - } - - pub const ResolveFunctionResult = struct { - result: ?Resolver.Result, - path: string, - query_string: []const u8 = "", - }; - - fn normalizeSpecifierForResolution(specifier_: []const u8, query_string: *[]const u8) []const u8 { - var specifier = specifier_; - - if (strings.indexOfChar(specifier, '?')) |i| { - query_string.* = specifier[i..]; - specifier = specifier[0..i]; - } - - return specifier; - } - - threadlocal var specifier_cache_resolver_buf: bun.PathBuffer = undefined; - fn _resolve( - jsc_vm: *VirtualMachine, - ret: *ResolveFunctionResult, - specifier: string, - source: string, - is_esm: bool, - comptime is_a_file_path: bool, - ) !void { - if (strings.eqlComptime(std.fs.path.basename(specifier), Runtime.Runtime.Imports.alt_name)) { - ret.path = Runtime.Runtime.Imports.Name; - return; - } else if (strings.eqlComptime(specifier, main_file_name)) { - ret.result = null; - ret.path = jsc_vm.entry_point.source.path.text; - return; - } else if (strings.hasPrefixComptime(specifier, js_ast.Macro.namespaceWithColon)) { - ret.result = null; - ret.path = specifier; - return; - } else if (strings.hasPrefixComptime(specifier, NodeFallbackModules.import_path)) { - ret.result = null; - ret.path = specifier; - return; - } else if (JSC.HardcodedModule.Alias.get(specifier, .bun)) |result| { - ret.result = null; - ret.path = result.path; - return; - } else if (jsc_vm.module_loader.eval_source != null and - (strings.endsWithComptime(specifier, bun.pathLiteral("/[eval]")) or - strings.endsWithComptime(specifier, bun.pathLiteral("/[stdin]")))) - { - ret.result = null; - ret.path = specifier; - return; - } else if (strings.hasPrefixComptime(specifier, "blob:")) { - ret.result = null; - if (JSC.WebCore.ObjectURLRegistry.singleton().has(specifier["blob:".len..])) { - ret.path = specifier; - return; - } else { - return error.ModuleNotFound; - } - } - - const is_special_source = strings.eqlComptime(source, main_file_name) or js_ast.Macro.isMacroPath(source); - var query_string: []const u8 = ""; - const normalized_specifier = normalizeSpecifierForResolution(specifier, &query_string); - const source_to_use = if (!is_special_source) - if (is_a_file_path) - Fs.PathName.init(source).dirWithTrailingSlash() - else - source - else - jsc_vm.transpiler.fs.top_level_dir; - - const result: 
Resolver.Result = try brk: { - // TODO: We only want to retry on not found only when the directories we searched for were cached. - // This fixes an issue where new files created in cached directories were not picked up. - // See https://github.com/oven-sh/bun/issues/3216 - // - // This cache-bust is disabled when the filesystem is not being used to resolve. - var retry_on_not_found = std.fs.path.isAbsolute(source_to_use); - while (true) { - break :brk switch (jsc_vm.transpiler.resolver.resolveAndAutoInstall( - source_to_use, - normalized_specifier, - if (is_esm) .stmt else .require, - if (jsc_vm.standalone_module_graph == null) .read_only else .disable, - )) { - .success => |r| r, - .failure => |e| e, - .pending, .not_found => if (!retry_on_not_found) - error.ModuleNotFound - else { - retry_on_not_found = false; - - const buster_name = name: { - if (std.fs.path.isAbsolute(normalized_specifier)) { - if (std.fs.path.dirname(normalized_specifier)) |dir| { - // Normalized without trailing slash - break :name bun.strings.normalizeSlashesOnly(&specifier_cache_resolver_buf, dir, std.fs.path.sep); - } - } - - var parts = [_]string{ - source_to_use, - normalized_specifier, - bun.pathLiteral(".."), - }; - - break :name bun.path.joinAbsStringBufZ( - jsc_vm.transpiler.fs.top_level_dir, - &specifier_cache_resolver_buf, - &parts, - .auto, - ); - }; - - // Only re-query if we previously had something cached. - if (jsc_vm.transpiler.resolver.bustDirCache(bun.strings.withoutTrailingSlashWindowsPath(buster_name))) { - continue; - } - - return error.ModuleNotFound; - }, - }; - } - }; - - if (!jsc_vm.macro_mode) { - jsc_vm.has_any_macro_remappings = jsc_vm.has_any_macro_remappings or jsc_vm.transpiler.options.macro_remap.count() > 0; - } - ret.result = result; - ret.query_string = query_string; - const result_path = result.pathConst() orelse return error.ModuleNotFound; - jsc_vm.resolved_count += 1; - - ret.path = result_path.text; - } - - pub fn resolve( - res: *ErrorableString, - global: *JSGlobalObject, - specifier: bun.String, - source: bun.String, - query_string: ?*ZigString, - is_esm: bool, - ) !void { - try resolveMaybeNeedsTrailingSlash(res, global, specifier, source, query_string, is_esm, true, false); - } - - fn normalizeSource(source: []const u8) []const u8 { - if (strings.hasPrefixComptime(source, "file://")) { - return source["file://".len..]; - } - - return source; - } - - pub fn resolveMaybeNeedsTrailingSlash( - res: *ErrorableString, - global: *JSGlobalObject, - specifier: bun.String, - source: bun.String, - query_string: ?*ZigString, - is_esm: bool, - comptime is_a_file_path: bool, - is_user_require_resolve: bool, - ) bun.JSError!void { - if (is_a_file_path and specifier.length() > comptime @as(u32, @intFromFloat(@trunc(@as(f64, @floatFromInt(bun.MAX_PATH_BYTES)) * 1.5)))) { - const specifier_utf8 = specifier.toUTF8(bun.default_allocator); - defer specifier_utf8.deinit(); - const source_utf8 = source.toUTF8(bun.default_allocator); - defer source_utf8.deinit(); - const printed = JSC.ResolveMessage.fmt( - bun.default_allocator, - specifier_utf8.slice(), - source_utf8.slice(), - error.NameTooLong, - if (is_esm) .stmt else if (is_user_require_resolve) .require_resolve else .require, - ) catch bun.outOfMemory(); - const msg = logger.Msg{ - .data = logger.rangeData( - null, - logger.Range.None, - printed, - ), - }; - res.* = ErrorableString.err(error.NameTooLong, JSC.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()).asVoid()); - return; - } - - var result = 
ResolveFunctionResult{ .path = "", .result = null }; - const jsc_vm = global.bunVM(); - const specifier_utf8 = specifier.toUTF8(bun.default_allocator); - defer specifier_utf8.deinit(); - - const source_utf8 = source.toUTF8(bun.default_allocator); - defer source_utf8.deinit(); - if (jsc_vm.plugin_runner) |plugin_runner| { - if (PluginRunner.couldBePlugin(specifier_utf8.slice())) { - const namespace = PluginRunner.extractNamespace(specifier_utf8.slice()); - const after_namespace = if (namespace.len == 0) - specifier_utf8.slice() - else - specifier_utf8.slice()[namespace.len + 1 .. specifier_utf8.len]; - - if (try plugin_runner.onResolveJSC(bun.String.init(namespace), bun.String.fromUTF8(after_namespace), source, .bun)) |resolved_path| { - res.* = resolved_path; - return; - } - } - } - - if (JSC.HardcodedModule.Alias.get(specifier_utf8.slice(), .bun)) |hardcoded| { - res.* = ErrorableString.ok( - if (is_user_require_resolve and hardcoded.node_builtin) - specifier - else - bun.String.init(hardcoded.path), - ); - return; - } - - const old_log = jsc_vm.log; - // the logger can end up being called on another thread, it must not use threadlocal Heap Allocator - var log = logger.Log.init(bun.default_allocator); - defer log.deinit(); - jsc_vm.log = &log; - jsc_vm.transpiler.resolver.log = &log; - jsc_vm.transpiler.linker.log = &log; - defer { - jsc_vm.log = old_log; - jsc_vm.transpiler.linker.log = old_log; - jsc_vm.transpiler.resolver.log = old_log; - } - jsc_vm._resolve(&result, specifier_utf8.slice(), normalizeSource(source_utf8.slice()), is_esm, is_a_file_path) catch |err_| { - var err = err_; - const msg: logger.Msg = brk: { - const msgs: []logger.Msg = log.msgs.items; - - for (msgs) |m| { - if (m.metadata == .resolve) { - err = m.metadata.resolve.err; - break :brk m; - } - } - - const import_kind: bun.ImportKind = if (is_esm) - .stmt - else if (is_user_require_resolve) - .require_resolve - else - .require; - - const printed = try JSC.ResolveMessage.fmt( - jsc_vm.allocator, - specifier_utf8.slice(), - source_utf8.slice(), - err, - import_kind, - ); - break :brk logger.Msg{ - .data = logger.rangeData( - null, - logger.Range.None, - printed, - ), - .metadata = .{ - .resolve = .{ - .specifier = logger.BabyString.in(printed, specifier_utf8.slice()), - .import_kind = import_kind, - }, - }, - }; - }; - - { - res.* = ErrorableString.err(err, JSC.ResolveMessage.create(global, VirtualMachine.get().allocator, msg, source_utf8.slice()).asVoid()); - } - - return; - }; - - if (query_string) |query| { - query.* = ZigString.init(result.query_string); - } - - res.* = ErrorableString.ok(bun.String.init(result.path)); - } - - pub const main_file_name: string = "bun:main"; - - pub export fn Bun__drainMicrotasksFromJS(globalObject: *JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSValue { - _ = callframe; // autofix - globalObject.bunVM().drainMicrotasks(); - return .undefined; - } - - pub fn drainMicrotasks(this: *VirtualMachine) void { - this.eventLoop().drainMicrotasks(); - } - - pub fn processFetchLog(globalThis: *JSGlobalObject, specifier: bun.String, referrer: bun.String, log: *logger.Log, ret: *ErrorableResolvedSource, err: anyerror) void { - switch (log.msgs.items.len) { - 0 => { - const msg: logger.Msg = brk: { - if (err == error.UnexpectedPendingResolution) { - break :brk logger.Msg{ - .data = logger.rangeData( - null, - logger.Range.None, - std.fmt.allocPrint(globalThis.allocator(), "Unexpected pending import in \"{}\". 
To automatically install npm packages with Bun, please use an import statement instead of require() or dynamic import().\nThis error can also happen if dependencies import packages which are not referenced anywhere. Worst case, run `bun install` and opt-out of the node_modules folder until we come up with a better way to handle this error.", .{specifier}) catch unreachable, - ), - }; - } - - break :brk logger.Msg{ - .data = logger.rangeData(null, logger.Range.None, std.fmt.allocPrint(globalThis.allocator(), "{s} while building {}", .{ @errorName(err), specifier }) catch unreachable), - }; - }; - { - ret.* = ErrorableResolvedSource.err(err, JSC.BuildMessage.create(globalThis, globalThis.allocator(), msg).asVoid()); - } - return; - }, - - 1 => { - const msg = log.msgs.items[0]; - ret.* = ErrorableResolvedSource.err(err, switch (msg.metadata) { - .build => JSC.BuildMessage.create(globalThis, globalThis.allocator(), msg).asVoid(), - .resolve => JSC.ResolveMessage.create( - globalThis, - globalThis.allocator(), - msg, - referrer.toUTF8(bun.default_allocator).slice(), - ).asVoid(), - }); - return; - }, - else => { - var errors_stack: [256]JSValue = undefined; - - const len = @min(log.msgs.items.len, errors_stack.len); - const errors = errors_stack[0..len]; - const logs = log.msgs.items[0..len]; - - for (logs, errors) |msg, *current| { - current.* = switch (msg.metadata) { - .build => JSC.BuildMessage.create(globalThis, globalThis.allocator(), msg), - .resolve => JSC.ResolveMessage.create( - globalThis, - globalThis.allocator(), - msg, - referrer.toUTF8(bun.default_allocator).slice(), - ), - }; - } - - ret.* = ErrorableResolvedSource.err( - err, - globalThis.createAggregateError( - errors, - &ZigString.init( - std.fmt.allocPrint(globalThis.allocator(), "{d} errors building \"{}\"", .{ - errors.len, - specifier, - }) catch unreachable, - ), - ).asVoid(), - ); - }, - } - } - - // TODO: - pub fn deinit(this: *VirtualMachine) void { - this.auto_killer.deinit(); - - if (source_code_printer) |print| { - print.getMutableBuffer().deinit(); - print.ctx.written = &.{}; - } - this.source_mappings.deinit(); - if (this.rare_data) |rare_data| { - rare_data.deinit(); - } - this.has_terminated = true; - } - - pub const ExceptionList = std.ArrayList(Api.JsException); - - pub fn printException( - this: *VirtualMachine, - exception: *Exception, - exception_list: ?*ExceptionList, - comptime Writer: type, - writer: Writer, - comptime allow_side_effects: bool, - ) void { - var formatter = ConsoleObject.Formatter{ - .globalThis = this.global, - .quote_strings = false, - .single_line = false, - .stack_check = bun.StackCheck.init(), - }; - defer formatter.deinit(); - if (Output.enable_ansi_colors) { - this.printErrorlikeObject(exception.value(), exception, exception_list, &formatter, Writer, writer, true, allow_side_effects); - } else { - this.printErrorlikeObject(exception.value(), exception, exception_list, &formatter, Writer, writer, false, allow_side_effects); - } - } - - pub fn runErrorHandlerWithDedupe(this: *VirtualMachine, result: JSValue, exception_list: ?*ExceptionList) void { - if (this.last_reported_error_for_dedupe == result and !this.last_reported_error_for_dedupe.isEmptyOrUndefinedOrNull()) - return; - - this.runErrorHandler(result, exception_list); - } - - pub noinline fn runErrorHandler(this: *VirtualMachine, result: JSValue, exception_list: ?*ExceptionList) void { - @branchHint(.cold); - if (!result.isEmptyOrUndefinedOrNull()) - this.last_reported_error_for_dedupe = result; - - const prev_had_errors = 
this.had_errors; - this.had_errors = false; - defer this.had_errors = prev_had_errors; - - const error_writer = Output.errorWriter(); - var buffered_writer = std.io.bufferedWriter(error_writer); - defer { - buffered_writer.flush() catch {}; - } - - const writer = buffered_writer.writer(); - - if (result.isException(this.global.vm())) { - const exception = @as(*Exception, @ptrCast(result.asVoid())); - this.printException( - exception, - exception_list, - @TypeOf(writer), - writer, - true, - ); - } else { - var formatter = ConsoleObject.Formatter{ - .globalThis = this.global, - .quote_strings = false, - .single_line = false, - .stack_check = bun.StackCheck.init(), - .error_display_level = .full, - }; - defer formatter.deinit(); - switch (Output.enable_ansi_colors) { - inline else => |enable_colors| this.printErrorlikeObject(result, null, exception_list, &formatter, @TypeOf(writer), writer, enable_colors, true), - } - } - } - - export fn Bun__logUnhandledException(exception: JSValue) void { - get().runErrorHandler(exception, null); - } - - pub fn clearEntryPoint( - this: *VirtualMachine, - ) void { - if (this.main.len == 0) { - return; - } - - var str = ZigString.init(main_file_name); - this.global.deleteModuleRegistryEntry(&str); - } - - fn loadPreloads(this: *VirtualMachine) !?*JSInternalPromise { - this.is_in_preload = true; - defer this.is_in_preload = false; - - for (this.preload) |preload| { - var result = switch (this.transpiler.resolver.resolveAndAutoInstall( - this.transpiler.fs.top_level_dir, - normalizeSource(preload), - .stmt, - if (this.standalone_module_graph == null) .read_only else .disable, - )) { - .success => |r| r, - .failure => |e| { - this.log.addErrorFmt( - null, - logger.Loc.Empty, - this.allocator, - "{s} resolving preload {}", - .{ - @errorName(e), - bun.fmt.formatJSONStringLatin1(preload), - }, - ) catch unreachable; - return e; - }, - .pending, .not_found => { - this.log.addErrorFmt( - null, - logger.Loc.Empty, - this.allocator, - "preload not found {}", - .{ - bun.fmt.formatJSONStringLatin1(preload), - }, - ) catch unreachable; - return error.ModuleNotFound; - }, - }; - var promise = JSModuleLoader.import(this.global, &String.fromBytes(result.path().?.text)); - - this.pending_internal_promise = promise; - JSValue.fromCell(promise).protect(); - defer JSValue.fromCell(promise).unprotect(); - - // pending_internal_promise can change if hot module reloading is enabled - if (this.isWatcherEnabled()) { - this.eventLoop().performGC(); - switch (this.pending_internal_promise.?.status(this.global.vm())) { - .pending => { - while (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { - this.eventLoop().tick(); - - if (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { - this.eventLoop().autoTick(); - } - } - }, - else => {}, - } - } else { - this.eventLoop().performGC(); - this.waitForPromise(JSC.AnyPromise{ - .internal = promise, - }); - } - - if (promise.status(this.global.vm()) == .rejected) - return promise; - } - - // only load preloads once - this.preload.len = 0; - - return null; - } - - pub fn ensureDebugger(this: *VirtualMachine, block_until_connected: bool) !void { - if (this.debugger != null) { - try Debugger.create(this, this.global); - - if (block_until_connected) { - Debugger.waitForDebuggerIfNecessary(this); - } - } - } - - extern fn Bun__loadHTMLEntryPoint(global: *JSGlobalObject) *JSInternalPromise; - - pub fn reloadEntryPoint(this: *VirtualMachine, entry_path: []const u8) !*JSInternalPromise { - this.has_loaded = 
false; - this.main = entry_path; - this.main_hash = Watcher.getHash(entry_path); - - try this.ensureDebugger(true); - - if (!this.main_is_html_entrypoint) { - try this.entry_point.generate( - this.allocator, - this.bun_watcher != .none, - entry_path, - main_file_name, - ); - } - - if (!this.transpiler.options.disable_transpilation) { - if (this.preload.len > 0) { - if (try this.loadPreloads()) |promise| { - JSValue.fromCell(promise).ensureStillAlive(); - JSValue.fromCell(promise).protect(); - this.pending_internal_promise = promise; - return promise; - } - - // Check if Module.runMain was patched - const prev = this.pending_internal_promise; - if (this.has_patched_run_main) { - @branchHint(.cold); - this.pending_internal_promise = null; - const ret = NodeModuleModule__callOverriddenRunMain(this.global, bun.String.createUTF8ForJS(this.global, main_file_name)); - if (this.pending_internal_promise == prev or this.pending_internal_promise == null) { - this.pending_internal_promise = JSInternalPromise.resolvedPromise(this.global, ret); - return this.pending_internal_promise.?; - } - return (this.pending_internal_promise orelse prev).?; - } - } - - const promise = if (!this.main_is_html_entrypoint) - JSModuleLoader.loadAndEvaluateModule(this.global, &String.init(main_file_name)) orelse return error.JSError - else - Bun__loadHTMLEntryPoint(this.global); - - this.pending_internal_promise = promise; - JSValue.fromCell(promise).ensureStillAlive(); - return promise; - } else { - const promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.fromBytes(this.main)) orelse return error.JSError; - this.pending_internal_promise = promise; - JSValue.fromCell(promise).ensureStillAlive(); - - return promise; - } - } - - extern "C" fn NodeModuleModule__callOverriddenRunMain(global: *JSGlobalObject, argv1: JSValue) JSValue; - export fn Bun__VirtualMachine__setOverrideModuleRunMain(vm: *VirtualMachine, is_patched: bool) void { - if (vm.is_in_preload) { - vm.has_patched_run_main = is_patched; - } - } - export fn Bun__VirtualMachine__setOverrideModuleRunMainPromise(vm: *VirtualMachine, promise: *JSInternalPromise) void { - if (vm.pending_internal_promise == null) { - vm.pending_internal_promise = promise; - } - } - - pub fn reloadEntryPointForTestRunner(this: *VirtualMachine, entry_path: []const u8) !*JSInternalPromise { - this.has_loaded = false; - this.main = entry_path; - this.main_hash = Watcher.getHash(entry_path); - - this.eventLoop().ensureWaker(); - - try this.ensureDebugger(true); - - if (!this.transpiler.options.disable_transpilation) { - if (try this.loadPreloads()) |promise| { - JSValue.fromCell(promise).ensureStillAlive(); - this.pending_internal_promise = promise; - JSValue.fromCell(promise).protect(); - - return promise; - } - } - - const promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.fromBytes(this.main)) orelse return error.JSError; - this.pending_internal_promise = promise; - JSValue.fromCell(promise).ensureStillAlive(); - - return promise; - } - - // Workers don't have a bun_watcher, and we don't want to call autoTick before dispatchOnline - pub fn loadEntryPointForWebWorker(this: *VirtualMachine, entry_path: string) anyerror!*JSInternalPromise { - const promise = try this.reloadEntryPoint(entry_path); - this.eventLoop().performGC(); - this.eventLoop().waitForPromiseWithTermination(JSC.AnyPromise{ - .internal = promise, - }); - if (this.worker) |worker| { - if (worker.hasRequestedTerminate()) { - return error.WorkerTerminated; - } - } - return 
this.pending_internal_promise.?; - } - - pub fn loadEntryPointForTestRunner(this: *VirtualMachine, entry_path: string) anyerror!*JSInternalPromise { - var promise = try this.reloadEntryPointForTestRunner(entry_path); - - // pending_internal_promise can change if hot module reloading is enabled - if (this.isWatcherEnabled()) { - this.eventLoop().performGC(); - switch (this.pending_internal_promise.?.status(this.global.vm())) { - .pending => { - while (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { - this.eventLoop().tick(); - - if (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { - this.eventLoop().autoTick(); - } - } - }, - else => {}, - } - } else { - if (promise.status(this.global.vm()) == .rejected) { - return promise; - } - - this.eventLoop().performGC(); - this.waitForPromise(.{ .internal = promise }); - } - - this.eventLoop().autoTick(); - - return this.pending_internal_promise.?; - } - - pub fn loadEntryPoint(this: *VirtualMachine, entry_path: string) anyerror!*JSInternalPromise { - var promise = try this.reloadEntryPoint(entry_path); - - // pending_internal_promise can change if hot module reloading is enabled - if (this.isWatcherEnabled()) { - this.eventLoop().performGC(); - switch (this.pending_internal_promise.?.status(this.global.vm())) { - .pending => { - while (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { - this.eventLoop().tick(); - - if (this.pending_internal_promise.?.status(this.global.vm()) == .pending) { - this.eventLoop().autoTick(); - } - } - }, - else => {}, - } - } else { - if (promise.status(this.global.vm()) == .rejected) { - return promise; - } - - this.eventLoop().performGC(); - this.waitForPromise(.{ .internal = promise }); - } - - return this.pending_internal_promise.?; - } - - pub fn addListeningSocketForWatchMode(this: *VirtualMachine, socket: bun.FileDescriptor) void { - if (this.hot_reload != .watch) { - return; - } - - this.rareData().addListeningSocketForWatchMode(socket); - } - pub fn removeListeningSocketForWatchMode(this: *VirtualMachine, socket: bun.FileDescriptor) void { - if (this.hot_reload != .watch) { - return; - } - - this.rareData().removeListeningSocketForWatchMode(socket); - } - - pub fn loadMacroEntryPoint(this: *VirtualMachine, entry_path: string, function_name: string, specifier: string, hash: i32) !*JSInternalPromise { - const entry_point_entry = try this.macro_entry_points.getOrPut(hash); - - if (!entry_point_entry.found_existing) { - var macro_entry_pointer: *MacroEntryPoint = this.allocator.create(MacroEntryPoint) catch unreachable; - entry_point_entry.value_ptr.* = macro_entry_pointer; - try macro_entry_pointer.generate(&this.transpiler, Fs.PathName.init(entry_path), function_name, hash, specifier); - } - const entry_point = entry_point_entry.value_ptr.*; - - var loader = MacroEntryPointLoader{ - .path = entry_point.source.path.text, - }; - - this.runWithAPILock(MacroEntryPointLoader, &loader, MacroEntryPointLoader.load); - return loader.promise orelse return error.JSError; - } - - /// A subtlety of JavaScriptCore: - /// JavaScriptCore has many release asserts that check an API lock is currently held - /// We cannot hold it from Zig code because it relies on C++ RAII to automatically release the lock - /// and it is not safe to copy the lock itself - /// So we have to wrap entry points to & from JavaScript with an API lock that calls out to C++ - pub inline fn runWithAPILock(this: *VirtualMachine, comptime Context: type, ctx: *Context, comptime function: fn 
(ctx: *Context) void) void { - this.global.vm().holdAPILock(ctx, OpaqueWrap(Context, function)); - } - - const MacroEntryPointLoader = struct { - path: string, - promise: ?*JSInternalPromise = null, - pub fn load(this: *MacroEntryPointLoader) void { - this.promise = VirtualMachine.get()._loadMacroEntryPoint(this.path); - } - }; - - pub inline fn _loadMacroEntryPoint(this: *VirtualMachine, entry_path: string) ?*JSInternalPromise { - var promise: *JSInternalPromise = undefined; - - promise = JSModuleLoader.loadAndEvaluateModule(this.global, &String.init(entry_path)) orelse return null; - this.waitForPromise(JSC.AnyPromise{ - .internal = promise, - }); - - return promise; - } - - pub fn printErrorLikeObjectToConsole(this: *VirtualMachine, value: JSValue) void { - this.runErrorHandler(value, null); - } - - // When the Error-like object is one of our own, it's best to rely on the object directly instead of serializing it to a ZigException. - // This is for: - // - BuildMessage - // - ResolveMessage - // If there were multiple errors, it could be contained in an AggregateError. - // In that case, this function becomes recursive. - // In all other cases, we will convert it to a ZigException. - pub fn printErrorlikeObject( - this: *VirtualMachine, - value: JSValue, - exception: ?*Exception, - exception_list: ?*ExceptionList, - formatter: *ConsoleObject.Formatter, - comptime Writer: type, - writer: Writer, - comptime allow_ansi_color: bool, - comptime allow_side_effects: bool, - ) void { - var was_internal = false; - - defer { - if (was_internal) { - if (exception) |exception_| { - var holder = ZigException.Holder.init(); - var zig_exception: *ZigException = holder.zigException(); - holder.deinit(this); - exception_.getStackTrace(this.global, &zig_exception.stack); - if (zig_exception.stack.frames_len > 0) { - if (allow_ansi_color) { - printStackTrace(Writer, writer, zig_exception.stack, true) catch {}; - } else { - printStackTrace(Writer, writer, zig_exception.stack, false) catch {}; - } - } - - if (exception_list) |list| { - zig_exception.addToErrorList(list, this.transpiler.fs.top_level_dir, &this.origin) catch {}; - } - } - } - } - - if (value.isAggregateError(this.global)) { - const AggregateErrorIterator = struct { - writer: Writer, - current_exception_list: ?*ExceptionList = null, - formatter: *ConsoleObject.Formatter, - - pub fn iteratorWithColor(vm: *VM, globalObject: *JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { - iterator(vm, globalObject, nextValue, ctx.?, true); - } - pub fn iteratorWithOutColor(vm: *VM, globalObject: *JSGlobalObject, ctx: ?*anyopaque, nextValue: JSValue) callconv(.C) void { - iterator(vm, globalObject, nextValue, ctx.?, false); - } - inline fn iterator(_: *VM, _: *JSGlobalObject, nextValue: JSValue, ctx: ?*anyopaque, comptime color: bool) void { - const this_ = @as(*@This(), @ptrFromInt(@intFromPtr(ctx))); - VirtualMachine.get().printErrorlikeObject(nextValue, null, this_.current_exception_list, this_.formatter, Writer, this_.writer, color, allow_side_effects); - } - }; - var iter = AggregateErrorIterator{ .writer = writer, .current_exception_list = exception_list, .formatter = formatter }; - if (comptime allow_ansi_color) { - value.getErrorsProperty(this.global).forEach(this.global, &iter, AggregateErrorIterator.iteratorWithColor); - } else { - value.getErrorsProperty(this.global).forEach(this.global, &iter, AggregateErrorIterator.iteratorWithOutColor); - } - return; - } - - was_internal = this.printErrorFromMaybePrivateData( - value, - 
exception_list, - formatter, - Writer, - writer, - allow_ansi_color, - allow_side_effects, - ); - } - - fn printErrorFromMaybePrivateData( - this: *VirtualMachine, - value: JSValue, - exception_list: ?*ExceptionList, - formatter: *ConsoleObject.Formatter, - comptime Writer: type, - writer: Writer, - comptime allow_ansi_color: bool, - comptime allow_side_effects: bool, - ) bool { - if (value.jsType() == .DOMWrapper) { - if (value.as(JSC.BuildMessage)) |build_error| { - defer Output.flush(); - if (!build_error.logged) { - if (this.had_errors) { - writer.writeAll("\n") catch {}; - } - build_error.msg.writeFormat(writer, allow_ansi_color) catch {}; - build_error.logged = true; - writer.writeAll("\n") catch {}; - } - this.had_errors = this.had_errors or build_error.msg.kind == .err; - if (exception_list != null) { - this.log.addMsg( - build_error.msg, - ) catch {}; - } - return true; - } else if (value.as(JSC.ResolveMessage)) |resolve_error| { - defer Output.flush(); - if (!resolve_error.logged) { - if (this.had_errors) { - writer.writeAll("\n") catch {}; - } - resolve_error.msg.writeFormat(writer, allow_ansi_color) catch {}; - resolve_error.logged = true; - writer.writeAll("\n") catch {}; - } - - this.had_errors = this.had_errors or resolve_error.msg.kind == .err; - - if (exception_list != null) { - this.log.addMsg( - resolve_error.msg, - ) catch {}; - } - return true; - } - } - - this.printErrorInstance( - .js, - value, - exception_list, - formatter, - Writer, - writer, - allow_ansi_color, - allow_side_effects, - ) catch |err| { - if (err == error.JSError) { - this.global.clearException(); - } else if (comptime Environment.isDebug) { - // yo dawg - Output.printErrorln("Error while printing Error-like object: {s}", .{@errorName(err)}); - Output.flush(); - } - }; - - return false; - } - - pub fn reportUncaughtException(globalObject: *JSGlobalObject, exception: *Exception) JSValue { - var jsc_vm = globalObject.bunVM(); - _ = jsc_vm.uncaughtException(globalObject, exception.value(), false); - return .undefined; - } - - pub fn printStackTrace(comptime Writer: type, writer: Writer, trace: ZigStackTrace, comptime allow_ansi_colors: bool) !void { - const stack = trace.frames(); - if (stack.len > 0) { - var vm = VirtualMachine.get(); - const origin: ?*const URL = if (vm.is_from_devserver) &vm.origin else null; - const dir = vm.transpiler.fs.top_level_dir; - - for (stack) |frame| { - const file_slice = frame.source_url.toUTF8(bun.default_allocator); - defer file_slice.deinit(); - const func_slice = frame.function_name.toUTF8(bun.default_allocator); - defer func_slice.deinit(); - - const file = file_slice.slice(); - const func = func_slice.slice(); - - if (file.len == 0 and func.len == 0) continue; - - const has_name = std.fmt.count("{}", .{frame.nameFormatter(false)}) > 0; - - if (has_name and !frame.position.isInvalid()) { - try writer.print( - comptime Output.prettyFmt( - " at {} ({})\n", - allow_ansi_colors, - ), - .{ - frame.nameFormatter( - allow_ansi_colors, - ), - frame.sourceURLFormatter( - dir, - origin, - false, - allow_ansi_colors, - ), - }, - ); - } else if (!frame.position.isInvalid()) { - try writer.print( - comptime Output.prettyFmt( - " at {}\n", - allow_ansi_colors, - ), - .{ - frame.sourceURLFormatter( - dir, - origin, - false, - allow_ansi_colors, - ), - }, - ); - } else if (has_name) { - try writer.print( - comptime Output.prettyFmt( - " at {}\n", - allow_ansi_colors, - ), - .{ - frame.nameFormatter( - allow_ansi_colors, - ), - }, - ); - } else { - try writer.print( - comptime 
Output.prettyFmt( - " at {}\n", - allow_ansi_colors, - ), - .{ - frame.sourceURLFormatter( - dir, - origin, - false, - allow_ansi_colors, - ), - }, - ); - } - } - } - } - - pub export fn Bun__remapStackFramePositions(vm: *JSC.VirtualMachine, frames: [*]JSC.ZigStackFrame, frames_count: usize) void { - // **Warning** this method can be called in the heap collector thread!! - // https://github.com/oven-sh/bun/issues/17087 - vm.remapStackFramePositions(frames, frames_count); - } - - pub fn remapStackFramePositions(this: *VirtualMachine, frames: [*]JSC.ZigStackFrame, frames_count: usize) void { - for (frames[0..frames_count]) |*frame| { - if (frame.position.isInvalid() or frame.remapped) continue; - var sourceURL = frame.source_url.toUTF8(bun.default_allocator); - defer sourceURL.deinit(); - - // **Warning** this method can be called in the heap collector thread!! - // https://github.com/oven-sh/bun/issues/17087 - this.remap_stack_frames_mutex.lock(); - defer this.remap_stack_frames_mutex.unlock(); - - if (this.resolveSourceMapping( - sourceURL.slice(), - @max(frame.position.line.zeroBased(), 0), - @max(frame.position.column.zeroBased(), 0), - .no_source_contents, - )) |lookup| { - const source_map = lookup.source_map; - defer if (source_map) |map| map.deref(); - if (lookup.displaySourceURLIfNeeded(sourceURL.slice())) |source_url| { - frame.source_url.deref(); - frame.source_url = source_url; - } - const mapping = lookup.mapping; - frame.position.line = Ordinal.fromZeroBased(mapping.original.lines); - frame.position.column = Ordinal.fromZeroBased(mapping.original.columns); - frame.remapped = true; - } else { - // we don't want it to be remapped again - frame.remapped = true; - } - } - } - - pub fn remapZigException( - this: *VirtualMachine, - exception: *ZigException, - error_instance: JSValue, - exception_list: ?*ExceptionList, - must_reset_parser_arena_later: *bool, - source_code_slice: *?ZigString.Slice, - allow_source_code_preview: bool, - ) void { - error_instance.toZigException(this.global, exception); - const enable_source_code_preview = allow_source_code_preview and - !(bun.getRuntimeFeatureFlag("BUN_DISABLE_SOURCE_CODE_PREVIEW") or - bun.getRuntimeFeatureFlag("BUN_DISABLE_TRANSPILED_SOURCE_CODE_PREVIEW")); - - defer { - if (Environment.isDebug) { - if (!enable_source_code_preview and source_code_slice.* != null) { - Output.panic("Do not collect source code when we don't need to", .{}); - } else if (!enable_source_code_preview and exception.stack.source_lines_numbers[0] != -1) { - Output.panic("Do not collect source code when we don't need to", .{}); - } - } - } - - // defer this so that it copies correctly - defer if (exception_list) |list| { - exception.addToErrorList(list, this.transpiler.fs.top_level_dir, &this.origin) catch unreachable; - }; - - const NoisyBuiltinFunctionMap = bun.ComptimeStringMap(void, .{ - .{"asyncModuleEvaluation"}, - .{"link"}, - .{"linkAndEvaluateModule"}, - .{"moduleEvaluation"}, - .{"processTicksAndRejections"}, - }); - - var frames: []JSC.ZigStackFrame = exception.stack.frames_ptr[0..exception.stack.frames_len]; - if (this.hide_bun_stackframes) { - var start_index: ?usize = null; - for (frames, 0..) 
|frame, i| { - if (frame.source_url.eqlComptime("bun:wrap") or - frame.function_name.eqlComptime("::bunternal::")) - { - start_index = i; - break; - } - - // Workaround for being unable to hide that specific frame without also hiding the frame before it - if (frame.source_url.isEmpty() and NoisyBuiltinFunctionMap.getWithEql(frame.function_name, String.eqlComptime) != null) { - start_index = 0; - break; - } - } - - if (start_index) |k| { - var j = k; - for (frames[k..]) |frame| { - if (frame.source_url.eqlComptime("bun:wrap") or - frame.function_name.eqlComptime("::bunternal::")) - { - continue; - } - - // Workaround for being unable to hide that specific frame without also hiding the frame before it - if (frame.source_url.isEmpty() and NoisyBuiltinFunctionMap.getWithEql(frame.function_name, String.eqlComptime) != null) { - continue; - } - - frames[j] = frame; - j += 1; - } - exception.stack.frames_len = @as(u8, @truncate(j)); - frames.len = j; - } - } - - if (frames.len == 0) return; - - var top = &frames[0]; - var top_frame_is_builtin = false; - if (this.hide_bun_stackframes) { - for (frames) |*frame| { - if (frame.source_url.hasPrefixComptime("bun:") or - frame.source_url.hasPrefixComptime("node:") or - frame.source_url.isEmpty() or - frame.source_url.eqlComptime("native") or - frame.source_url.eqlComptime("unknown")) - { - top_frame_is_builtin = true; - continue; - } - - top = frame; - top_frame_is_builtin = false; - break; - } - } - - var top_source_url = top.source_url.toUTF8(bun.default_allocator); - defer top_source_url.deinit(); - - const maybe_lookup = if (top.remapped) - SourceMap.Mapping.Lookup{ - .mapping = .{ - .generated = .{}, - .original = .{ - .lines = @max(top.position.line.zeroBased(), 0), - .columns = @max(top.position.column.zeroBased(), 0), - }, - .source_index = 0, - }, - .source_map = null, - .prefetched_source_code = null, - } - else - this.resolveSourceMapping( - top_source_url.slice(), - @max(top.position.line.zeroBased(), 0), - @max(top.position.column.zeroBased(), 0), - .source_contents, - ); - - if (maybe_lookup) |lookup| { - const mapping = lookup.mapping; - const source_map = lookup.source_map; - defer if (source_map) |map| map.deref(); - - if (!top.remapped) { - if (lookup.displaySourceURLIfNeeded(top_source_url.slice())) |src| { - top.source_url.deref(); - top.source_url = src; - } - } - - const code = code: { - if (!enable_source_code_preview) { - break :code ZigString.Slice.empty; - } - - if (!top.remapped and lookup.source_map != null and lookup.source_map.?.isExternal()) { - if (lookup.getSourceCode(top_source_url.slice())) |src| { - break :code src; - } - } - - if (top_frame_is_builtin) { - // Avoid printing "export default 'native'" - break :code ZigString.Slice.empty; - } - - var log = logger.Log.init(bun.default_allocator); - defer log.deinit(); - - var original_source = fetchWithoutOnLoadPlugins(this, this.global, top.source_url, bun.String.empty, &log, .print_source) catch return; - must_reset_parser_arena_later.* = true; - break :code original_source.source_code.toUTF8(bun.default_allocator); - }; - - if (enable_source_code_preview and code.len == 0) { - exception.collectSourceLines(error_instance, this.global); - } - - if (code.len > 0) - source_code_slice.* = code; - - top.position.line = Ordinal.fromZeroBased(mapping.original.lines); - top.position.column = Ordinal.fromZeroBased(mapping.original.columns); - - exception.remapped = true; - top.remapped = true; - - const last_line = @max(top.position.line.zeroBased(), 0); - if 
(strings.getLinesInText( - code.slice(), - @intCast(last_line), - ZigException.Holder.source_lines_count, - )) |lines_buf| { - var lines = lines_buf.slice(); - var source_lines = exception.stack.source_lines_ptr[0..ZigException.Holder.source_lines_count]; - var source_line_numbers = exception.stack.source_lines_numbers[0..ZigException.Holder.source_lines_count]; - @memset(source_lines, String.empty); - @memset(source_line_numbers, 0); - - lines = lines[0..@min(@as(usize, lines.len), source_lines.len)]; - var current_line_number: i32 = @intCast(last_line); - for (lines, source_lines[0..lines.len], source_line_numbers[0..lines.len]) |line, *line_dest, *line_number| { - // To minimize duplicate allocations, we use the same slice as above - // it should virtually always be UTF-8 and thus not cloned - line_dest.* = String.init(line); - line_number.* = current_line_number; - current_line_number -= 1; - } - - exception.stack.source_lines_len = @as(u8, @truncate(lines.len)); - } - } else if (enable_source_code_preview) { - exception.collectSourceLines(error_instance, this.global); - } - - if (frames.len > 1) { - for (frames) |*frame| { - if (frame == top or frame.position.isInvalid()) continue; - const source_url = frame.source_url.toUTF8(bun.default_allocator); - defer source_url.deinit(); - if (this.resolveSourceMapping( - source_url.slice(), - @max(frame.position.line.zeroBased(), 0), - @max(frame.position.column.zeroBased(), 0), - .no_source_contents, - )) |lookup| { - defer if (lookup.source_map) |map| map.deref(); - if (lookup.displaySourceURLIfNeeded(source_url.slice())) |src| { - frame.source_url.deref(); - frame.source_url = src; - } - const mapping = lookup.mapping; - frame.remapped = true; - frame.position.line = Ordinal.fromZeroBased(mapping.original.lines); - frame.position.column = Ordinal.fromZeroBased(mapping.original.columns); - } - } - } - } - - pub fn printExternallyRemappedZigException( - this: *VirtualMachine, - zig_exception: *ZigException, - formatter: ?*ConsoleObject.Formatter, - comptime Writer: type, - writer: Writer, - comptime allow_side_effects: bool, - comptime allow_ansi_color: bool, - ) !void { - var default_formatter: ConsoleObject.Formatter = .{ .globalThis = this.global }; - defer default_formatter.deinit(); - try this.printErrorInstance( - .zig_exception, - zig_exception, - null, - formatter orelse &default_formatter, - Writer, - writer, - allow_ansi_color, - allow_side_effects, - ); - } - - fn printErrorInstance( - this: *VirtualMachine, - comptime mode: enum { js, zig_exception }, - error_instance: switch (mode) { - .js => JSValue, - .zig_exception => *ZigException, - }, - exception_list: ?*ExceptionList, - formatter: *ConsoleObject.Formatter, - comptime Writer: type, - writer: Writer, - comptime allow_ansi_color: bool, - comptime allow_side_effects: bool, - ) !void { - var exception_holder = if (mode == .js) ZigException.Holder.init(); - var exception = if (mode == .js) exception_holder.zigException() else error_instance; - defer if (mode == .js) exception_holder.deinit(this); - defer if (mode == .js) error_instance.ensureStillAlive(); - - // The ZigException structure stores substrings of the source code, in - // which we need the lifetime of this data to outlive the inner call to - // remapZigException, but still get freed. 
- var source_code_slice: ?ZigString.Slice = null; - defer if (source_code_slice) |slice| slice.deinit(); - - if (mode == .js) { - this.remapZigException( - exception, - error_instance, - exception_list, - &exception_holder.need_to_clear_parser_arena_on_deinit, - &source_code_slice, - formatter.error_display_level != .warn, - ); - } - const prev_had_errors = this.had_errors; - this.had_errors = true; - defer this.had_errors = prev_had_errors; - - if (allow_side_effects) { - if (this.debugger) |*debugger| { - debugger.lifecycle_reporter_agent.reportError(exception); - } - } - - defer if (allow_side_effects and Output.is_github_action) - printGithubAnnotation(exception); - - // This is a longer number than necessary because we don't handle this case very well - // At the very least, we shouldn't dump 100 KB of minified code into your terminal. - const max_line_length_with_divot = 512; - const max_line_length = 1024; - - const line_numbers = exception.stack.source_lines_numbers[0..exception.stack.source_lines_len]; - var max_line: i32 = -1; - for (line_numbers) |line| max_line = @max(max_line, line); - const max_line_number_pad = std.fmt.count("{d}", .{max_line + 1}); - - var source_lines = exception.stack.sourceLineIterator(); - var last_pad: u64 = 0; - while (source_lines.untilLast()) |source| { - defer source.text.deinit(); - const display_line = source.line + 1; - - const int_size = std.fmt.count("{d}", .{display_line}); - const pad = max_line_number_pad - int_size; - last_pad = pad; - try writer.writeByteNTimes(' ', pad); - - const trimmed = std.mem.trimRight(u8, std.mem.trim(u8, source.text.slice(), "\n"), "\t "); - const clamped = trimmed[0..@min(trimmed.len, max_line_length)]; - - if (clamped.len != trimmed.len) { - const fmt = if (comptime allow_ansi_color) " | ... truncated \n" else "\n"; - try writer.print( - comptime Output.prettyFmt( - "{d} | {}" ++ fmt, - allow_ansi_color, - ), - .{ display_line, bun.fmt.fmtJavaScript(clamped, .{ .enable_colors = allow_ansi_color }) }, - ); - } else { - try writer.print( - comptime Output.prettyFmt( - "{d} | {}\n", - allow_ansi_color, - ), - .{ display_line, bun.fmt.fmtJavaScript(clamped, .{ .enable_colors = allow_ansi_color }) }, - ); - } - } - - const name = exception.name; - const message = exception.message; - - const is_error_instance = mode == .js and - (error_instance != .zero and error_instance.jsType() == .ErrorInstance); - const code: ?[]const u8 = if (is_error_instance) code: { - if (error_instance.uncheckedPtrCast(JSC.JSObject).getCodePropertyVMInquiry(this.global)) |code_value| { - if (code_value.isString()) { - const code_string = code_value.toBunString(this.global) catch { - // JSC::JSString to WTF::String can only fail on out of memory. 
- bun.outOfMemory(); - }; - defer code_string.deref(); - - if (code_string.is8Bit()) { - // We can count on this memory being valid until the end - // of this function because - break :code code_string.latin1(); - } - } - } - break :code null; - } else null; - - var did_print_name = false; - if (source_lines.next()) |source| brk: { - if (source.text.len == 0) break :brk; - - var top_frame = if (exception.stack.frames_len > 0) &exception.stack.frames()[0] else null; - - if (this.hide_bun_stackframes) { - for (exception.stack.frames()) |*frame| { - if (frame.position.isInvalid() or frame.source_url.hasPrefixComptime("bun:") or frame.source_url.hasPrefixComptime("node:")) continue; - top_frame = frame; - break; - } - } - - if (top_frame == null or top_frame.?.position.isInvalid()) { - defer did_print_name = true; - defer source.text.deinit(); - const trimmed = std.mem.trimRight(u8, std.mem.trim(u8, source.text.slice(), "\n"), "\t "); - - const text = trimmed[0..@min(trimmed.len, max_line_length)]; - - if (text.len != trimmed.len) { - const fmt = if (comptime allow_ansi_color) " | ... truncated \n" else "\n"; - try writer.print( - comptime Output.prettyFmt( - "- | {}" ++ fmt, - allow_ansi_color, - ), - .{bun.fmt.fmtJavaScript(text, .{ .enable_colors = allow_ansi_color })}, - ); - } else { - try writer.print( - comptime Output.prettyFmt( - "- | {}\n", - allow_ansi_color, - ), - .{bun.fmt.fmtJavaScript(text, .{ .enable_colors = allow_ansi_color })}, - ); - } - - try this.printErrorNameAndMessage(name, message, !exception.browser_url.isEmpty(), code, Writer, writer, allow_ansi_color, formatter.error_display_level); - } else if (top_frame) |top| { - defer did_print_name = true; - const display_line = source.line + 1; - const int_size = std.fmt.count("{d}", .{display_line}); - const pad = max_line_number_pad - int_size; - try writer.writeByteNTimes(' ', pad); - defer source.text.deinit(); - const text = source.text.slice(); - const trimmed = std.mem.trimRight(u8, std.mem.trim(u8, text, "\n"), "\t "); - - // TODO: preserve the divot position and possibly use stringWidth() to figure out where to put the divot - const clamped = trimmed[0..@min(trimmed.len, max_line_length)]; - - if (clamped.len != trimmed.len) { - const fmt = if (comptime allow_ansi_color) " | ... 
truncated \n\n" else "\n\n"; - try writer.print( - comptime Output.prettyFmt( - "{d} | {}" ++ fmt, - allow_ansi_color, - ), - .{ display_line, bun.fmt.fmtJavaScript(clamped, .{ .enable_colors = allow_ansi_color }) }, - ); - } else { - try writer.print( - comptime Output.prettyFmt( - "{d} | {}\n", - allow_ansi_color, - ), - .{ display_line, bun.fmt.fmtJavaScript(clamped, .{ .enable_colors = allow_ansi_color }) }, - ); - - if (clamped.len < max_line_length_with_divot or top.position.column.zeroBased() > max_line_length_with_divot) { - const indent = max_line_number_pad + " | ".len + @as(u64, @intCast(top.position.column.zeroBased())); - - try writer.writeByteNTimes(' ', indent); - try writer.print(comptime Output.prettyFmt( - "^\n", - allow_ansi_color, - ), .{}); - } else { - try writer.writeAll("\n"); - } - } - - try this.printErrorNameAndMessage(name, message, !exception.browser_url.isEmpty(), code, Writer, writer, allow_ansi_color, formatter.error_display_level); - } - } - - if (!did_print_name) { - try this.printErrorNameAndMessage(name, message, !exception.browser_url.isEmpty(), code, Writer, writer, allow_ansi_color, formatter.error_display_level); - } - - // This is usually unsafe to do, but we are protecting them each time first - var errors_to_append = std.ArrayList(JSValue).init(this.allocator); - defer { - for (errors_to_append.items) |err| { - err.unprotect(); - } - errors_to_append.deinit(); - } - - if (is_error_instance) { - var saw_cause = false; - const Iterator = JSC.JSPropertyIterator(.{ - .include_value = true, - .skip_empty_name = true, - .own_properties_only = true, - .observable = false, - .only_non_index_properties = true, - }); - // SAFETY: error instances are always objects - const error_obj = error_instance.getObject().?; - var iterator = try Iterator.init(this.global, error_obj); - defer iterator.deinit(); - const longest_name = @min(iterator.getLongestPropertyName(), 10); - var is_first_property = true; - while (try iterator.next()) |field| { - const value = iterator.value; - if (field.eqlComptime("message") or field.eqlComptime("name") or field.eqlComptime("stack")) { - continue; - } - - // We special-case the code property. Let's avoid printing it twice. 
- if (field.eqlComptime("code") and code != null) { - continue; - } - - const kind = value.jsType(); - if (kind == .ErrorInstance and - // avoid infinite recursion - !prev_had_errors) - { - if (field.eqlComptime("cause")) { - saw_cause = true; - } - value.protect(); - try errors_to_append.append(value); - } else if (kind.isObject() or kind.isArray() or value.isPrimitive() or kind.isStringLike()) { - var bun_str = bun.String.empty; - defer bun_str.deref(); - const prev_disable_inspect_custom = formatter.disable_inspect_custom; - const prev_quote_strings = formatter.quote_strings; - const prev_max_depth = formatter.max_depth; - const prev_format_buffer_as_text = formatter.format_buffer_as_text; - formatter.depth += 1; - formatter.format_buffer_as_text = true; - defer { - formatter.depth -= 1; - formatter.max_depth = prev_max_depth; - formatter.quote_strings = prev_quote_strings; - formatter.disable_inspect_custom = prev_disable_inspect_custom; - formatter.format_buffer_as_text = prev_format_buffer_as_text; - } - formatter.max_depth = 1; - formatter.quote_strings = true; - formatter.disable_inspect_custom = true; - - const pad_left = longest_name -| field.length(); - is_first_property = false; - try writer.writeByteNTimes(' ', pad_left); - - try writer.print(comptime Output.prettyFmt(" {}: ", allow_ansi_color), .{field}); - - // When we're printing errors for a top-level uncaught exception / rejection, suppress further errors here. - if (allow_side_effects) { - if (this.global.hasException()) { - this.global.clearException(); - } - } - - formatter.format( - JSC.Formatter.Tag.getAdvanced( - value, - this.global, - .{ .disable_inspect_custom = true, .hide_global = true }, - ), - Writer, - writer, - value, - this.global, - allow_ansi_color, - ) catch {}; - - if (allow_side_effects) { - // When we're printing errors for a top-level uncaught exception / rejection, suppress further errors here. - if (this.global.hasException()) { - this.global.clearException(); - } - } else if (this.global.hasException() or formatter.failed) { - return; - } - - try writer.writeAll(comptime Output.prettyFmt(",\n", allow_ansi_color)); - } - } - - if (code) |code_str| { - const pad_left = longest_name -| "code".len; - is_first_property = false; - try writer.writeByteNTimes(' ', pad_left); - - try writer.print(comptime Output.prettyFmt(" code: {}\n", allow_ansi_color), .{ - bun.fmt.quote(code_str), - }); - } - - if (!is_first_property) { - try writer.writeAll("\n"); - } - - // "cause" is not enumerable, so the above loop won't see it. - if (!saw_cause) { - if (error_instance.getOwn(this.global, "cause")) |cause| { - if (cause.jsType() == .ErrorInstance) { - cause.protect(); - try errors_to_append.append(cause); - } - } - } - } else if (mode == .js and error_instance != .zero) { - // If you do reportError([1,2,3]] we should still show something at least. 
- const tag = JSC.Formatter.Tag.getAdvanced( - error_instance, - this.global, - .{ .disable_inspect_custom = true, .hide_global = true }, - ); - if (tag.tag != .NativeCode) { - try formatter.format( - tag, - Writer, - writer, - error_instance, - this.global, - allow_ansi_color, - ); - - // Always include a newline in this case - try writer.writeAll("\n"); - } - } - - try printStackTrace(@TypeOf(writer), writer, exception.stack, allow_ansi_color); - - if (!exception.browser_url.isEmpty()) { - try writer.print( - comptime Output.prettyFmt( - " from browser tab {}\n", - allow_ansi_color, - ), - .{exception.browser_url}, - ); - } - - for (errors_to_append.items) |err| { - try writer.writeAll("\n"); - try this.printErrorInstance(.js, err, exception_list, formatter, Writer, writer, allow_ansi_color, allow_side_effects); - } - } - - fn printErrorNameAndMessage( - _: *VirtualMachine, - name: String, - message: String, - is_browser_error: bool, - optional_code: ?[]const u8, - comptime Writer: type, - writer: Writer, - comptime allow_ansi_color: bool, - error_display_level: ConsoleObject.FormatOptions.ErrorDisplayLevel, - ) !void { - if (is_browser_error) { - try writer.writeAll(bun.Output.prettyFmt("frontend ", true)); - } - if (!name.isEmpty() and !message.isEmpty()) { - const display_name, const display_message = if (name.eqlComptime("Error")) brk: { - // If `err.code` is set, and `err.message` is of form `{code}: {text}`, - // use the code as the name since `error: ENOENT: no such ...` is - // not as nice looking since it there are two error prefixes. - if (optional_code) |code| if (bun.strings.isAllASCII(code)) { - const has_prefix = switch (message.isUTF16()) { - inline else => |is_utf16| has_prefix: { - const msg_chars = if (is_utf16) message.utf16() else message.latin1(); - // + 1 to ensure the message is a non-empty string. - break :has_prefix msg_chars.len > code.len + ": ".len + 1 and - (if (is_utf16) - // there is no existing function to perform this slice comparison - // []const u16, []const u8 - for (code, msg_chars[0..code.len]) |a, b| { - if (a != b) break false; - } else true - else - bun.strings.eqlLong(msg_chars[0..code.len], code, false)) and - msg_chars[code.len] == ':' and - msg_chars[code.len + 1] == ' '; - }, - }; - if (has_prefix) break :brk .{ - String.init(code), - message.substring(code.len + ": ".len), - }; - }; - - break :brk .{ String.empty, message }; - } else .{ name, message }; - try writer.print(comptime Output.prettyFmt("{}{}\n", allow_ansi_color), .{ - error_display_level.formatter(display_name, allow_ansi_color, .include_colon), - display_message, - }); - } else if (!name.isEmpty()) { - try writer.print("{}\n", .{error_display_level.formatter(name, allow_ansi_color, .include_colon)}); - } else if (!message.isEmpty()) { - try writer.print(comptime Output.prettyFmt("{}{}\n", allow_ansi_color), .{ error_display_level.formatter(bun.String.empty, allow_ansi_color, .include_colon), message }); - } else { - try writer.print(comptime Output.prettyFmt("{}\n", allow_ansi_color), .{error_display_level.formatter(bun.String.empty, allow_ansi_color, .exclude_colon)}); - } - } - - // In Github Actions, emit an annotation that renders the error and location. 
- // https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#setting-an-error-message - pub noinline fn printGithubAnnotation(exception: *ZigException) void { - @branchHint(.cold); - const name = exception.name; - const message = exception.message; - const frames = exception.stack.frames(); - const top_frame = if (frames.len > 0) frames[0] else null; - const dir = bun.getenvZ("GITHUB_WORKSPACE") orelse bun.fs.FileSystem.instance.top_level_dir; - const allocator = bun.default_allocator; - Output.flush(); - - var buffered_writer = std.io.bufferedWriter(Output.errorWriter()); - var writer = buffered_writer.writer(); - defer { - buffered_writer.flush() catch {}; - } - - var has_location = false; - - if (top_frame) |frame| { - if (!frame.position.isInvalid()) { - const source_url = frame.source_url.toUTF8(allocator); - defer source_url.deinit(); - const file = bun.path.relative(dir, source_url.slice()); - writer.print("\n::error file={s},line={d},col={d},title=", .{ - file, - frame.position.line.oneBased(), - frame.position.column.oneBased(), - }) catch {}; - has_location = true; - } - } - - if (!has_location) { - writer.print("\n::error title=", .{}) catch {}; - } - - if (name.isEmpty() or name.eqlComptime("Error")) { - writer.print("error", .{}) catch {}; - } else { - writer.print("{s}", .{name.githubAction()}) catch {}; - } - - if (!message.isEmpty()) { - const message_slice = message.toUTF8(allocator); - defer message_slice.deinit(); - const msg = message_slice.slice(); - - var cursor: u32 = 0; - while (strings.indexOfNewlineOrNonASCIIOrANSI(msg, cursor)) |i| { - cursor = i + 1; - if (msg[i] == '\n') { - const first_line = bun.String.fromUTF8(msg[0..i]); - writer.print(": {s}::", .{first_line.githubAction()}) catch {}; - break; - } - } else { - writer.print(": {s}::", .{message.githubAction()}) catch {}; - } - - while (strings.indexOfNewlineOrNonASCIIOrANSI(msg, cursor)) |i| { - cursor = i + 1; - if (msg[i] == '\n') { - break; - } - } - - if (cursor > 0) { - const body = ZigString.initUTF8(msg[cursor..]); - writer.print("{s}", .{body.githubAction()}) catch {}; - } - } else { - writer.print("::", .{}) catch {}; - } - - // TODO: cleanup and refactor to use printStackTrace() - if (top_frame) |_| { - const vm = VirtualMachine.get(); - const origin = if (vm.is_from_devserver) &vm.origin else null; - - var i: i16 = 0; - while (i < frames.len) : (i += 1) { - const frame = frames[@as(usize, @intCast(i))]; - const source_url = frame.source_url.toUTF8(allocator); - defer source_url.deinit(); - const file = bun.path.relative(dir, source_url.slice()); - const func = frame.function_name.toUTF8(allocator); - - if (file.len == 0 and func.len == 0) continue; - - const has_name = std.fmt.count("{any}", .{frame.nameFormatter( - false, - )}) > 0; - - // %0A = escaped newline - if (has_name) { - writer.print( - "%0A at {any} ({any})", - .{ - frame.nameFormatter(false), - frame.sourceURLFormatter( - file, - origin, - false, - false, - ), - }, - ) catch {}; - } else { - writer.print( - "%0A at {any}", - .{ - frame.sourceURLFormatter( - file, - origin, - false, - false, - ), - }, - ) catch {}; - } - } - } - - writer.print("\n", .{}) catch {}; - } - - pub fn resolveSourceMapping( - this: *VirtualMachine, - path: []const u8, - line: i32, - column: i32, - source_handling: SourceMap.SourceContentHandling, - ) ?SourceMap.Mapping.Lookup { - return this.source_mappings.resolveMapping(path, line, column, source_handling) orelse { - if (this.standalone_module_graph) |graph| { - const file = 
graph.find(path) orelse return null; - const map = file.sourcemap.load() orelse return null; - - map.ref(); - - this.source_mappings.putValue(path, SavedSourceMap.Value.init(map)) catch - bun.outOfMemory(); - - const mapping = SourceMap.Mapping.find(map.mappings, line, column) orelse - return null; - - return .{ - .mapping = mapping, - .source_map = map, - .prefetched_source_code = null, - }; - } - - return null; - }; - } - - extern fn Process__emitMessageEvent(global: *JSGlobalObject, value: JSValue) void; - extern fn Process__emitDisconnectEvent(global: *JSGlobalObject) void; - pub extern fn Process__emitErrorEvent(global: *JSGlobalObject, value: JSValue) void; - - pub const IPCInstanceUnion = union(enum) { - /// IPC is put in this "enabled but not started" state when IPC is detected - /// but the client JavaScript has not yet done `.on("message")` - waiting: struct { - // TODO: rename to `fd` - info: bun.FD, - mode: IPC.Mode, - }, - initialized: *IPCInstance, - }; - - pub const IPCInstance = struct { - pub const new = bun.TrivialNew(@This()); - pub const deinit = bun.TrivialDeinit(@This()); - - globalThis: ?*JSGlobalObject, - context: if (Environment.isPosix) *uws.SocketContext else void, - data: IPC.IPCData, - has_disconnect_called: bool = false, - - const node_cluster_binding = @import("./node/node_cluster_binding.zig"); - - pub fn ipc(this: *IPCInstance) ?*IPC.IPCData { - return &this.data; - } - pub fn getGlobalThis(this: *IPCInstance) ?*JSGlobalObject { - return this.globalThis; - } - - pub fn handleIPCMessage(this: *IPCInstance, message: IPC.DecodedIPCMessage) void { - JSC.markBinding(@src()); - const globalThis = this.globalThis orelse return; - const event_loop = JSC.VirtualMachine.get().eventLoop(); - - switch (message) { - // In future versions we can read this in order to detect version mismatches, - // or disable future optimizations if the subprocess is old. - .version => |v| { - IPC.log("Parent IPC version is {d}", .{v}); - }, - .data => |data| { - IPC.log("Received IPC message from parent", .{}); - event_loop.enter(); - defer event_loop.exit(); - Process__emitMessageEvent(globalThis, data); - }, - .internal => |data| { - IPC.log("Received IPC internal message from parent", .{}); - event_loop.enter(); - defer event_loop.exit(); - node_cluster_binding.handleInternalMessageChild(globalThis, data) catch return; - }, - } - } - - pub fn handleIPCClose(this: *IPCInstance) void { - IPC.log("IPCInstance#handleIPCClose", .{}); - var vm = VirtualMachine.get(); - vm.ipc = null; - const event_loop = vm.eventLoop(); - node_cluster_binding.child_singleton.deinit(); - event_loop.enter(); - Process__emitDisconnectEvent(vm.global); - event_loop.exit(); - if (Environment.isPosix) { - uws.us_socket_context_free(0, this.context); - } - vm.channel_ref.disable(); - this.deinit(); - } - - export fn Bun__closeChildIPC(global: *JSGlobalObject) void { - if (global.bunVM().getIPCInstance()) |current_ipc| { - current_ipc.data.close(true); - } - } - - pub const Handlers = IPC.NewIPCHandler(IPCInstance); - }; - - pub fn initIPCInstance(this: *VirtualMachine, info: bun.FD, mode: IPC.Mode) void { - IPC.log("initIPCInstance {}", .{info}); - this.ipc = .{ .waiting = .{ .info = info, .mode = mode } }; - } - - pub fn getIPCInstance(this: *VirtualMachine) ?*IPCInstance { - if (this.ipc == null) return null; - if (this.ipc.? 
!= .waiting) return this.ipc.?.initialized; - const opts = this.ipc.?.waiting; - - IPC.log("getIPCInstance {}", .{opts.info}); - - this.event_loop.ensureWaker(); - - const instance = switch (Environment.os) { - else => instance: { - const context = uws.us_create_socket_context(0, this.event_loop_handle.?, @sizeOf(usize), .{}).?; - IPC.Socket.configure(context, true, *IPCInstance, IPCInstance.Handlers); - - var instance = IPCInstance.new(.{ - .globalThis = this.global, - .context = context, - .data = undefined, - }); - - this.ipc = .{ .initialized = instance }; - - const socket = IPC.Socket.fromFd(context, opts.info, IPCInstance, instance, null) orelse { - instance.deinit(); - this.ipc = null; - Output.warn("Unable to start IPC socket", .{}); - return null; - }; - socket.setTimeout(0); - - instance.data = .{ .socket = socket, .mode = opts.mode }; - - break :instance instance; - }, - .windows => instance: { - var instance = IPCInstance.new(.{ - .globalThis = this.global, - .context = {}, - .data = .{ .mode = opts.mode }, - }); - - this.ipc = .{ .initialized = instance }; - - instance.data.configureClient(IPCInstance, instance, opts.info) catch { - instance.deinit(); - this.ipc = null; - Output.warn("Unable to start IPC pipe '{}'", .{opts.info}); - return null; - }; - - break :instance instance; - }, - }; - - instance.data.writeVersionPacket(this.global); - - return instance; - } - - /// To satisfy the interface from NewHotReloader() - pub fn getLoaders(vm: *VirtualMachine) *bun.options.Loader.HashTable { - return &vm.transpiler.options.loaders; - } - - /// To satisfy the interface from NewHotReloader() - pub fn bustDirCache(vm: *VirtualMachine, path: []const u8) bool { - return vm.transpiler.resolver.bustDirCache(path); - } - - comptime { - _ = Bun__remapStackFramePositions; - } -}; - -pub const HotReloader = NewHotReloader(VirtualMachine, EventLoop, false); -pub const WatchReloader = NewHotReloader(VirtualMachine, EventLoop, true); -extern fn BunDebugger__willHotReload() void; - -pub fn NewHotReloader(comptime Ctx: type, comptime EventLoopType: type, comptime reload_immediately: bool) type { - return struct { - const Reloader = @This(); - - ctx: *Ctx, - verbose: bool = false, - pending_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - - tombstones: bun.StringHashMapUnmanaged(*bun.fs.FileSystem.RealFS.EntriesOption) = .{}, - - pub fn init(ctx: *Ctx, fs: *bun.fs.FileSystem, verbose: bool, clear_screen_flag: bool) *Watcher { - const reloader = bun.default_allocator.create(Reloader) catch bun.outOfMemory(); - reloader.* = .{ - .ctx = ctx, - .verbose = Environment.enable_logs or verbose, - }; - - clear_screen = clear_screen_flag; - const watcher = Watcher.init(Reloader, reloader, fs, bun.default_allocator) catch |err| { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); - }; - watcher.start() catch |err| { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - Output.panic("Failed to start File Watcher: {s}", .{@errorName(err)}); - }; - return watcher; - } - - fn debug(comptime fmt: string, args: anytype) void { - if (Environment.enable_logs) { - Output.scoped(.hot_reloader, false)(fmt, args); - } else { - Output.prettyErrorln("watcher: " ++ fmt, args); - } - } - - pub fn eventLoop(this: @This()) *EventLoopType { - return this.ctx.eventLoop(); - } - - pub fn enqueueTaskConcurrent(this: @This(), task: *JSC.ConcurrentTask) void { - if (comptime reload_immediately) - unreachable; - - 
this.eventLoop().enqueueTaskConcurrent(task); - } - - pub var clear_screen = false; - - pub const HotReloadTask = struct { - count: u8 = 0, - hashes: [8]u32, - paths: if (Ctx == bun.bake.DevServer) [8][]const u8 else void, - /// Left uninitialized until .enqueue - concurrent_task: JSC.ConcurrentTask, - reloader: *Reloader, - - pub fn initEmpty(reloader: *Reloader) HotReloadTask { - return .{ - .reloader = reloader, - - .hashes = [_]u32{0} ** 8, - .paths = if (Ctx == bun.bake.DevServer) [_][]const u8{&.{}} ** 8, - .count = 0, - .concurrent_task = undefined, - }; - } - - pub fn append(this: *HotReloadTask, id: u32) void { - if (this.count == 8) { - this.enqueue(); - this.count = 0; - } - - this.hashes[this.count] = id; - this.count += 1; - } - - pub fn run(this: *HotReloadTask) void { - // Since we rely on the event loop for hot reloads, there can be - // a delay before the next reload begins. In the time between the - // last reload and the next one, we shouldn't schedule any more - // hot reloads. Since we reload literally everything, we don't - // need to worry about missing any changes. - // - // Note that we set the count _before_ we reload, so that if we - // get another hot reload request while we're reloading, we'll - // still enqueue it. - while (this.reloader.pending_count.swap(0, .monotonic) > 0) { - this.reloader.ctx.reload(this); - } - } - - pub fn enqueue(this: *HotReloadTask) void { - JSC.markBinding(@src()); - if (this.count == 0) - return; - - if (comptime reload_immediately) { - Output.flush(); - if (comptime Ctx == ImportWatcher) { - if (this.reloader.ctx.rare_data) |rare| - rare.closeAllListenSocketsForWatchMode(); - } - bun.reloadProcess(bun.default_allocator, clear_screen, false); - unreachable; - } - - _ = this.reloader.pending_count.fetchAdd(1, .monotonic); - - BunDebugger__willHotReload(); - const that = bun.new(HotReloadTask, .{ - .reloader = this.reloader, - .count = this.count, - .paths = this.paths, - .hashes = this.hashes, - .concurrent_task = undefined, - }); - that.concurrent_task = .{ .task = Task.init(that), .auto_delete = false }; - that.reloader.enqueueTaskConcurrent(&that.concurrent_task); - this.count = 0; - } - - pub fn deinit(this: *HotReloadTask) void { - bun.destroy(this); - } - }; - - pub fn enableHotModuleReloading(this: *Ctx) void { - if (comptime @TypeOf(this.bun_watcher) == ImportWatcher) { - if (this.bun_watcher != .none) - return; - } else { - if (this.bun_watcher != null) - return; - } - - var reloader = bun.default_allocator.create(Reloader) catch bun.outOfMemory(); - reloader.* = .{ - .ctx = this, - .verbose = Environment.enable_logs or if (@hasField(Ctx, "log")) this.log.level.atLeast(.info) else false, - }; - - if (comptime @TypeOf(this.bun_watcher) == ImportWatcher) { - this.bun_watcher = if (reload_immediately) - .{ .watch = Watcher.init( - Reloader, - reloader, - this.transpiler.fs, - bun.default_allocator, - ) catch |err| { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); - } } - else - .{ .hot = Watcher.init( - Reloader, - reloader, - this.transpiler.fs, - bun.default_allocator, - ) catch |err| { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); - } }; - - if (reload_immediately) { - this.transpiler.resolver.watcher = Resolver.ResolveWatcher(*Watcher, Watcher.onMaybeWatchDirectory).init(this.bun_watcher.watch); - } else { - this.transpiler.resolver.watcher = 
Resolver.ResolveWatcher(*Watcher, Watcher.onMaybeWatchDirectory).init(this.bun_watcher.hot); - } - } else { - this.bun_watcher = Watcher.init( - Reloader, - reloader, - this.transpiler.fs, - bun.default_allocator, - ) catch |err| { - bun.handleErrorReturnTrace(err, @errorReturnTrace()); - Output.panic("Failed to enable File Watcher: {s}", .{@errorName(err)}); - }; - this.transpiler.resolver.watcher = Resolver.ResolveWatcher(*Watcher, Watcher.onMaybeWatchDirectory).init(this.bun_watcher.?); - } - - clear_screen = !this.transpiler.env.hasSetNoClearTerminalOnReload(!Output.enable_ansi_colors); - - reloader.getContext().start() catch @panic("Failed to start File Watcher"); - } - - fn putTombstone(this: *@This(), key: []const u8, value: *bun.fs.FileSystem.RealFS.EntriesOption) void { - this.tombstones.put(bun.default_allocator, key, value) catch unreachable; - } - - fn getTombstone(this: *@This(), key: []const u8) ?*bun.fs.FileSystem.RealFS.EntriesOption { - return this.tombstones.get(key); - } - - pub fn onError( - _: *@This(), - err: bun.sys.Error, - ) void { - Output.err(@as(bun.C.E, @enumFromInt(err.errno)), "Watcher crashed", .{}); - if (bun.Environment.isDebug) { - @panic("Watcher crash"); - } - } - - pub fn getContext(this: *@This()) *Watcher { - if (comptime @TypeOf(this.ctx.bun_watcher) == ImportWatcher) { - if (reload_immediately) { - return this.ctx.bun_watcher.watch; - } else { - return this.ctx.bun_watcher.hot; - } - } else if (@typeInfo(@TypeOf(this.ctx.bun_watcher)) == .optional) { - return this.ctx.bun_watcher.?; - } else { - return this.ctx.bun_watcher; - } - } - - pub noinline fn onFileUpdate( - this: *@This(), - events: []Watcher.WatchEvent, - changed_files: []?[:0]u8, - watchlist: Watcher.WatchList, - ) void { - const slice = watchlist.slice(); - const file_paths = slice.items(.file_path); - const counts = slice.items(.count); - const kinds = slice.items(.kind); - const hashes = slice.items(.hash); - const parents = slice.items(.parent_hash); - const file_descriptors = slice.items(.fd); - const ctx = this.getContext(); - defer ctx.flushEvictions(); - defer Output.flush(); - - const fs: *Fs.FileSystem = &Fs.FileSystem.instance; - const rfs: *Fs.FileSystem.RealFS = &fs.fs; - var _on_file_update_path_buf: bun.PathBuffer = undefined; - var current_task = HotReloadTask.initEmpty(this); - defer current_task.enqueue(); - - for (events) |event| { - const file_path = file_paths[event.index]; - const update_count = counts[event.index] + 1; - counts[event.index] = update_count; - const kind = kinds[event.index]; - - // so it's consistent with the rest - // if we use .extname we might run into an issue with whether or not the "." is included. - // const path = Fs.PathName.init(file_path); - const current_hash = hashes[event.index]; - - switch (kind) { - .file => { - if (event.op.delete or event.op.rename) { - ctx.removeAtIndex( - event.index, - 0, - &.{}, - .file, - ); - } - - if (this.verbose) - debug("File changed: {s}", .{fs.relativeTo(file_path)}); - - if (event.op.write or event.op.delete or event.op.rename) { - current_task.append(current_hash); - } - - // TODO: delete events? - }, - .directory => { - if (comptime Environment.isWindows) { - // on windows we receive file events for all items affected by a directory change - // so we only need to clear the directory cache. 
all other effects will be handled - // by the file events - _ = this.ctx.bustDirCache(strings.withoutTrailingSlashWindowsPath(file_path)); - continue; - } - var affected_buf: [128][]const u8 = undefined; - var entries_option: ?*Fs.FileSystem.RealFS.EntriesOption = null; - - const affected = brk: { - if (comptime Environment.isMac) { - if (rfs.entries.get(file_path)) |existing| { - this.putTombstone(file_path, existing); - entries_option = existing; - } else if (this.getTombstone(file_path)) |existing| { - entries_option = existing; - } - - var affected_i: usize = 0; - - // if a file descriptor is stale, we need to close it - if (event.op.delete and entries_option != null) { - for (parents, 0..) |parent_hash, entry_id| { - if (parent_hash == current_hash) { - const affected_path = file_paths[entry_id]; - const was_deleted = check: { - std.posix.access(affected_path, std.posix.F_OK) catch break :check true; - break :check false; - }; - if (!was_deleted) continue; - - affected_buf[affected_i] = affected_path[file_path.len..]; - affected_i += 1; - if (affected_i >= affected_buf.len) break; - } - } - } - - break :brk affected_buf[0..affected_i]; - } - - break :brk event.names(changed_files); - }; - - if (affected.len > 0 and !Environment.isMac) { - if (rfs.entries.get(file_path)) |existing| { - this.putTombstone(file_path, existing); - entries_option = existing; - } else if (this.getTombstone(file_path)) |existing| { - entries_option = existing; - } - } - - _ = this.ctx.bustDirCache(strings.withoutTrailingSlashWindowsPath(file_path)); - - if (entries_option) |dir_ent| { - var last_file_hash: Watcher.HashType = std.math.maxInt(Watcher.HashType); - - for (affected) |changed_name_| { - const changed_name: []const u8 = if (comptime Environment.isMac) - changed_name_ - else - bun.asByteSlice(changed_name_.?); - if (changed_name.len == 0 or changed_name[0] == '~' or changed_name[0] == '.') continue; - - const loader = (this.ctx.getLoaders().get(Fs.PathName.init(changed_name).ext) orelse .file); - var prev_entry_id: usize = std.math.maxInt(usize); - if (loader != .file) { - var path_string: bun.PathString = undefined; - var file_hash: Watcher.HashType = last_file_hash; - const abs_path: string = brk: { - if (dir_ent.entries.get(@as([]const u8, @ptrCast(changed_name)))) |file_ent| { - // reset the file descriptor - file_ent.entry.cache.fd = .invalid; - file_ent.entry.need_stat = true; - path_string = file_ent.entry.abs_path; - file_hash = Watcher.getHash(path_string.slice()); - for (hashes, 0..) |hash, entry_id| { - if (hash == file_hash) { - if (file_descriptors[entry_id].isValid()) { - if (prev_entry_id != entry_id) { - current_task.append(hashes[entry_id]); - ctx.removeAtIndex( - @as(u16, @truncate(entry_id)), - 0, - &.{}, - .file, - ); - } - } - - prev_entry_id = entry_id; - break; - } - } - - break :brk path_string.slice(); - } else { - const file_path_without_trailing_slash = std.mem.trimRight(u8, file_path, std.fs.path.sep_str); - @memcpy(_on_file_update_path_buf[0..file_path_without_trailing_slash.len], file_path_without_trailing_slash); - _on_file_update_path_buf[file_path_without_trailing_slash.len] = std.fs.path.sep; - - @memcpy(_on_file_update_path_buf[file_path_without_trailing_slash.len..][0..changed_name.len], changed_name); - const path_slice = _on_file_update_path_buf[0 .. 
file_path_without_trailing_slash.len + changed_name.len + 1]; - file_hash = Watcher.getHash(path_slice); - break :brk path_slice; - } - }; - - // skip consecutive duplicates - if (last_file_hash == file_hash) continue; - last_file_hash = file_hash; - - if (this.verbose) - debug("File change: {s}", .{fs.relativeTo(abs_path)}); - } - } - } - - if (this.verbose) { - debug("Dir change: {s}", .{fs.relativeTo(file_path)}); - } - }, - } - } - } - }; -} - -export fn Bun__addSourceProviderSourceMap(vm: *VirtualMachine, opaque_source_provider: *anyopaque, specifier: *bun.String) void { - var sfb = std.heap.stackFallback(4096, bun.default_allocator); - const slice = specifier.toUTF8(sfb.get()); - defer slice.deinit(); - vm.source_mappings.putZigSourceProvider(opaque_source_provider, slice.slice()); -} - -export fn Bun__removeSourceProviderSourceMap(vm: *VirtualMachine, opaque_source_provider: *anyopaque, specifier: *bun.String) void { - var sfb = std.heap.stackFallback(4096, bun.default_allocator); - const slice = specifier.toUTF8(sfb.get()); - defer slice.deinit(); - vm.source_mappings.removeZigSourceProvider(opaque_source_provider, slice.slice()); -} - -pub export var isBunTest: bool = false; - -// TODO: evaluate if this has any measurable performance impact. -pub var synthetic_allocation_limit: usize = std.math.maxInt(u32); -pub var string_allocation_limit: usize = std.math.maxInt(u32); - -comptime { - @export(&string_allocation_limit, .{ .name = "Bun__stringSyntheticAllocationLimit" }); -} - -pub fn Bun__setSyntheticAllocationLimitForTesting(globalObject: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const args = callframe.arguments_old(1).slice(); - if (args.len < 1) { - return globalObject.throwNotEnoughArguments("setSyntheticAllocationLimitForTesting", 1, args.len); - } - - if (!args[0].isNumber()) { - return globalObject.throwInvalidArguments("setSyntheticAllocationLimitForTesting expects a number", .{}); - } - - const limit: usize = @intCast(@max(args[0].coerceToInt64(globalObject), 1024 * 1024)); - const prev = synthetic_allocation_limit; - synthetic_allocation_limit = limit; - string_allocation_limit = limit; - return JSValue.jsNumber(prev); -} diff --git a/src/bun.js/javascript_core_c_api.zig b/src/bun.js/javascript_core_c_api.zig index 8d1c9ab3c1..7ac6438202 100644 --- a/src/bun.js/javascript_core_c_api.zig +++ b/src/bun.js/javascript_core_c_api.zig @@ -6,18 +6,17 @@ /// ************************************ const bun = @import("bun"); const std = @import("std"); -const cpp = @import("./bindings/bindings.zig"); const JSC = bun.JSC; const generic = opaque { - pub fn value(this: *const generic) cpp.JSValue { - return @as(cpp.JSValue, @enumFromInt(@as(cpp.JSValueReprInt, @bitCast(@intFromPtr(this))))); + pub fn value(this: *const generic) JSC.JSValue { + return @enumFromInt(@as(JSC.JSValue.backing_int, @bitCast(@intFromPtr(this)))); } }; pub const Private = anyopaque; pub const struct_OpaqueJSContextGroup = generic; pub const JSContextGroupRef = ?*const struct_OpaqueJSContextGroup; pub const struct_OpaqueJSContext = generic; -pub const JSGlobalContextRef = ?*cpp.JSGlobalObject; +pub const JSGlobalContextRef = ?*JSC.JSGlobalObject; pub const struct_OpaqueJSPropertyNameAccumulator = generic; pub const JSPropertyNameAccumulatorRef = ?*struct_OpaqueJSPropertyNameAccumulator; @@ -162,7 +161,7 @@ pub const OpaqueJSPropertyNameAccumulator = struct_OpaqueJSPropertyNameAccumulat // This is a workaround for not receiving a JSException* object // This function lets us use the C 
API but returns a plain old JSValue
 // allowing us to have exceptions that include stack traces
-pub extern "c" fn JSObjectCallAsFunctionReturnValueHoldingAPILock(ctx: *JSC.JSGlobalObject, object: JSObjectRef, thisObject: JSObjectRef, argumentCount: usize, arguments: [*c]const JSValueRef) cpp.JSValue;
+pub extern "c" fn JSObjectCallAsFunctionReturnValueHoldingAPILock(ctx: *JSC.JSGlobalObject, object: JSObjectRef, thisObject: JSObjectRef, argumentCount: usize, arguments: [*c]const JSValueRef) JSC.JSValue;
 pub extern fn JSRemoteInspectorDisableAutoStart() void;
 pub extern fn JSRemoteInspectorStart() void;
diff --git a/src/bun.js/jsc.zig b/src/bun.js/jsc.zig
new file mode 100644
index 0000000000..442a942c22
--- /dev/null
+++ b/src/bun.js/jsc.zig
@@ -0,0 +1,279 @@
+//! Bindings to JavaScriptCore and other JavaScript primitives such as
+//! VirtualMachine, JSGlobalObject (Zig::GlobalObject), and the event loop.
+//!
+//! Web and runtime-specific APIs should go in `bun.webcore` and `bun.api`.
+//!
+//! TODO: Remove remaining aliases to `webcore` and `api`
+
+/// The calling convention used for JavaScript functions <> Native
+pub const conv = if (bun.Environment.isWindows and bun.Environment.isX64)
+    std.builtin.CallingConvention.SysV
+else
+    std.builtin.CallingConvention.C;
+
+/// Web Template Framework
+pub const wtf = @import("bindings/WTF.zig").WTF;
+
+/// Binding for JSCInitialize in ZigGlobalObject.cpp
+pub fn initialize(eval_mode: bool) void {
+    markBinding(@src());
+    bun.analytics.Features.jsc += 1;
+    JSCInitialize(std.os.environ.ptr, std.os.environ.len, onJSCInvalidEnvVar, eval_mode);
+}
+
+pub const JSValue = @import("bindings/JSValue.zig").JSValue;
+
+// Host functions are the native function pointer type that can be used by a
+// JSC::JSFunction to call native code from JavaScript. To allow usage of `try`
+// for error handling, Bun provides toJSHostFn to wrap JSHostFnZig into JSHostFn.
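+//
+// A minimal usage sketch (illustrative only; `exampleHostFn` is a hypothetical
+// function, not an API defined in this file): a host function written against
+// JSHostFnZig can return `bun.JSError`, and toJSHostFn adapts it to the C
+// calling convention that JSC::JSFunction expects.
+fn exampleHostFn(global: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue {
+    const args = callframe.arguments_old(1).slice();
+    if (args.len < 1) {
+        return global.throwNotEnoughArguments("exampleHostFn", 1, args.len);
+    }
+    return JSValue.jsNumber(1);
+}
+// Errors from the wrapped function are converted into a pending JavaScript exception.
+const example_host_fn = toJSHostFn(exampleHostFn);
+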
+pub const host_fn = @import("jsc/host_fn.zig"); +pub const JSHostFn = host_fn.JSHostFn; +pub const JSHostFnZig = host_fn.JSHostFnZig; +pub const JSHostFnZigWithContext = host_fn.JSHostFnZigWithContext; +pub const JSHostFunctionTypeWithContext = host_fn.JSHostFunctionTypeWithContext; +pub const toJSHostFn = host_fn.toJSHostFn; +pub const toJSHostFnWithContext = host_fn.toJSHostFnWithContext; +pub const toJSHostValue = host_fn.toJSHostValue; +pub const createCallback = host_fn.createCallback; + +// JSC Classes Bindings +pub const AnyPromise = @import("bindings/AnyPromise.zig").AnyPromise; +pub const array_buffer = @import("jsc/array_buffer.zig"); +pub const ArrayBuffer = array_buffer.ArrayBuffer; +pub const MarkedArrayBuffer = array_buffer.MarkedArrayBuffer; +pub const CachedBytecode = @import("bindings/CachedBytecode.zig").CachedBytecode; +pub const CallFrame = @import("bindings/CallFrame.zig").CallFrame; +pub const CommonAbortReason = @import("bindings/CommonAbortReason.zig").CommonAbortReason; +pub const CommonStrings = @import("bindings/CommonStrings.zig").CommonStrings; +pub const CustomGetterSetter = @import("bindings/CustomGetterSetter.zig").CustomGetterSetter; +pub const DOMFormData = @import("bindings/DOMFormData.zig").DOMFormData; +pub const DOMURL = @import("bindings/DOMURL.zig").DOMURL; +pub const DeferredError = @import("bindings/DeferredError.zig").DeferredError; +pub const EncodedJSValue = @import("bindings/EncodedJSValue.zig").EncodedJSValue; +pub const GetterSetter = @import("bindings/GetterSetter.zig").GetterSetter; +pub const JSArray = @import("bindings/JSArray.zig").JSArray; +pub const JSArrayIterator = @import("bindings/JSArrayIterator.zig").JSArrayIterator; +pub const JSCell = @import("bindings/JSCell.zig").JSCell; +pub const JSFunction = @import("bindings/JSFunction.zig").JSFunction; +pub const JSGlobalObject = @import("bindings/JSGlobalObject.zig").JSGlobalObject; +pub const JSInternalPromise = @import("bindings/JSInternalPromise.zig").JSInternalPromise; +pub const JSMap = @import("bindings/JSMap.zig").JSMap; +pub const JSModuleLoader = @import("bindings/JSModuleLoader.zig").JSModuleLoader; +pub const JSObject = @import("bindings/JSObject.zig").JSObject; +pub const JSPromise = @import("bindings/JSPromise.zig").JSPromise; +pub const JSPromiseRejectionOperation = @import("bindings/JSPromiseRejectionOperation.zig").JSPromiseRejectionOperation; +pub const JSRef = @import("bindings/JSRef.zig").JSRef; +pub const JSString = @import("bindings/JSString.zig").JSString; +pub const JSUint8Array = @import("bindings/JSUint8Array.zig").JSUint8Array; +pub const RefString = @import("jsc/RefString.zig"); +pub const ScriptExecutionStatus = @import("bindings/ScriptExecutionStatus.zig").ScriptExecutionStatus; +pub const SourceType = @import("bindings/SourceType.zig").SourceType; +pub const Strong = @import("Strong.zig"); +pub const SystemError = @import("bindings/SystemError.zig").SystemError; +pub const URL = @import("bindings/URL.zig").URL; +pub const URLSearchParams = @import("bindings/URLSearchParams.zig").URLSearchParams; +pub const VM = @import("bindings/VM.zig").VM; +pub const Weak = @import("Weak.zig").Weak; +pub const WeakRefType = @import("Weak.zig").WeakRefType; +pub const Exception = @import("bindings/Exception.zig").Exception; +pub const SourceProvider = @import("bindings/SourceProvider.zig").SourceProvider; + +// JavaScript-related +pub const Errorable = @import("bindings/Errorable.zig").Errorable; +pub const ResolvedSource = 
@import("bindings/ResolvedSource.zig").ResolvedSource; +pub const ErrorCode = @import("bindings/ErrorCode.zig").ErrorCode; +pub const JSErrorCode = @import("bindings/JSErrorCode.zig").JSErrorCode; +pub const ZigErrorType = @import("bindings/ZigErrorType.zig").ZigErrorType; +pub const Debugger = @import("Debugger.zig"); +pub const SavedSourceMap = @import("SavedSourceMap.zig"); +pub const VirtualMachine = @import("VirtualMachine.zig"); +pub const ModuleLoader = @import("ModuleLoader.zig"); +pub const RareData = @import("rare_data.zig"); +pub const EventType = @import("bindings/EventType.zig").EventType; +pub const JSRuntimeType = @import("bindings/JSRuntimeType.zig").JSRuntimeType; +pub const ZigStackFrameCode = @import("bindings/ZigStackFrameCode.zig").ZigStackFrameCode; + +pub const ErrorableResolvedSource = Errorable(ResolvedSource); +pub const ErrorableZigString = Errorable(ZigString); +pub const ErrorableJSValue = Errorable(JSValue); +pub const ErrorableString = Errorable(bun.String); + +pub const ZigStackTrace = @import("bindings/ZigStackTrace.zig").ZigStackTrace; +pub const ZigStackFrame = @import("bindings/ZigStackFrame.zig").ZigStackFrame; +pub const ZigStackFramePosition = @import("bindings/ZigStackFramePosition.zig").ZigStackFramePosition; +pub const ZigException = @import("bindings/ZigException.zig").ZigException; + +pub const ConsoleObject = @import("ConsoleObject.zig"); +pub const Formatter = ConsoleObject.Formatter; + +pub const hot_reloader = @import("hot_reloader.zig"); + +// TODO: move into bun.api +pub const Jest = @import("test/jest.zig"); +pub const TestScope = @import("test/jest.zig").TestScope; +pub const Expect = @import("test/expect.zig"); +pub const Snapshot = @import("test/snapshot.zig"); + +pub const js_property_iterator = @import("bindings/JSPropertyIterator.zig"); +pub const JSPropertyIterator = js_property_iterator.JSPropertyIterator; +pub const JSPropertyIteratorOptions = js_property_iterator.JSPropertyIteratorOptions; + +const event_loop = @import("event_loop.zig"); +pub const AbstractVM = event_loop.AbstractVM; +pub const AnyEventLoop = event_loop.AnyEventLoop; +pub const AnyTask = event_loop.AnyTask; +pub const AnyTaskWithExtraContext = event_loop.AnyTaskWithExtraContext; +pub const ConcurrentCppTask = event_loop.ConcurrentCppTask; +pub const ConcurrentPromiseTask = event_loop.ConcurrentPromiseTask; +pub const ConcurrentTask = event_loop.ConcurrentTask; +pub const CppTask = event_loop.CppTask; +pub const DeferredTaskQueue = event_loop.DeferredTaskQueue; +pub const EventLoop = event_loop.EventLoop; +pub const EventLoopHandle = event_loop.EventLoopHandle; +pub const EventLoopKind = event_loop.EventLoopKind; +pub const EventLoopTask = event_loop.EventLoopTask; +pub const EventLoopTaskPtr = event_loop.EventLoopTaskPtr; +pub const GarbageCollectionController = event_loop.GarbageCollectionController; +pub const JsVM = event_loop.JsVM; +pub const ManagedTask = event_loop.ManagedTask; +pub const MiniEventLoop = event_loop.MiniEventLoop; +pub const MiniVM = event_loop.MiniVM; +pub const PlatformEventLoop = if (bun.Environment.isPosix) bun.uws.Loop else bun.Async.Loop; +pub const PosixSignalHandle = event_loop.PosixSignalHandle; +pub const PosixSignalTask = event_loop.PosixSignalTask; +pub const Task = event_loop.Task; +pub const WorkPool = event_loop.WorkPool; +pub const WorkPoolTask = event_loop.WorkPoolTask; +pub const WorkTask = event_loop.WorkTask; + +/// Deprecated: Use `bun.sys.Maybe` +pub const Maybe = bun.sys.Maybe; +/// Deprecated: Use the .fromAny() decl 
literal +pub const toJS = JSValue.fromAny; +/// Deprecated: Use the .jsBoolean() decl literal +pub const jsBoolean = JSValue.jsBoolean; +/// Deprecated: Use the .jsEmptyString() decl literal +pub const jsEmptyString = JSValue.jsEmptyString; +/// Deprecated: Use the .jsNumber() decl literal +pub const jsNumber = JSValue.jsNumber; +/// Deprecated: Avoid using this in new code. +pub const C = @import("javascript_core_c_api.zig"); +/// Deprecated: Remove all of these please. +pub const Sizes = @import("bindings/sizes.zig"); +/// Deprecated: Use `bun.String` +pub const ZigString = @import("bindings/ZigString.zig").ZigString; +/// Deprecated: Use `bun.webcore` +pub const WebCore = bun.webcore; +/// Deprecated: Use `bun.api` +pub const API = bun.api; +/// Deprecated: Use `bun.api.node` +pub const Node = bun.api.node; +/// Deprecated: use `bun.api.HTMLRewriter` +pub const Cloudflare = bun.api.HTMLRewriter; + +const log = bun.Output.scoped(.JSC, true); +pub inline fn markBinding(src: std.builtin.SourceLocation) void { + log("{s} ({s}:{d})", .{ src.fn_name, src.file, src.line }); +} +pub inline fn markMemberBinding(comptime class: anytype, src: std.builtin.SourceLocation) void { + if (!bun.Environment.enable_logs) return; + const classname = switch (@typeInfo(@TypeOf(class))) { + .pointer => class, // assumed to be a static string + else => @typeName(class), + }; + log("{s}.{s} ({s}:{d})", .{ classname, src.fn_name, src.file, src.line }); +} + +pub const Subprocess = bun.api.Subprocess; + +/// This file is generated by: +/// 1. `bun src/bun.js/scripts/generate-classes.ts` +/// 2. Scan for **/*.classes.ts files in src/bun.js/src +/// 3. Generate a JS wrapper for each class in: +/// - Zig: generated_classes.zig +/// - C++: ZigGeneratedClasses.h, ZigGeneratedClasses.cpp +/// 4. For the Zig code to successfully compile: +/// - Add it to generated_classes_list.zig +/// - Expose the generated methods: +/// ```zig +/// pub const js = JSC.Codegen.JSMyClassName; +/// pub const toJS = js.toJS; +/// pub const fromJS = js.fromJS; +/// pub const fromJSDirect = js.fromJSDirect; +/// ``` +/// 5. `bun run build` +/// +pub const Codegen = @import("ZigGeneratedClasses"); +pub const GeneratedClassesList = @import("bindings/generated_classes_list.zig").Classes; + +pub const RuntimeTranspilerCache = @import("RuntimeTranspilerCache.zig").RuntimeTranspilerCache; + +/// Track whether an object should keep the event loop alive +pub const Ref = struct { + has: bool = false, + + pub fn init() Ref { + return .{}; + } + + pub fn unref(this: *Ref, vm: *VirtualMachine) void { + if (!this.has) + return; + this.has = false; + vm.active_tasks -= 1; + } + + pub fn ref(this: *Ref, vm: *VirtualMachine) void { + if (this.has) + return; + this.has = true; + vm.active_tasks += 1; + } +}; + +pub const OpaqueCallback = *const fn (current: ?*anyopaque) callconv(.C) void; +pub fn OpaqueWrap(comptime Context: type, comptime Function: fn (this: *Context) void) OpaqueCallback { + return struct { + pub fn callback(ctx: ?*anyopaque) callconv(.C) void { + const context: *Context = @as(*Context, @ptrCast(@alignCast(ctx.?))); + Function(context); + } + }.callback; +} + +pub const Error = @import("ErrorCode").Error; + +/// According to https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date, +/// maximum Date in JavaScript is less than Number.MAX_SAFE_INTEGER (u52). 
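+/// For example (illustrative values only), `toJSTime(1, 500_000_000)` packs
+/// 1 second plus 500_000_000 ns into 1500 ms, truncated to the u52 JSTimeType.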
+pub const init_timestamp = std.math.maxInt(JSTimeType); +pub const JSTimeType = u52; +pub fn toJSTime(sec: isize, nsec: isize) JSTimeType { + const millisec = @as(u64, @intCast(@divTrunc(nsec, std.time.ns_per_ms))); + return @as(JSTimeType, @truncate(@as(u64, @intCast(sec * std.time.ms_per_s)) + millisec)); +} + +pub const MAX_SAFE_INTEGER = 9007199254740991; +pub const MIN_SAFE_INTEGER = -9007199254740991; + +extern "c" fn JSCInitialize(env: [*]const [*:0]u8, count: usize, cb: *const fn ([*]const u8, len: usize) callconv(.C) void, eval_mode: bool) void; +fn onJSCInvalidEnvVar(name: [*]const u8, len: usize) callconv(.C) void { + bun.Output.errGeneric( + \\invalid JSC environment variable + \\ + \\ {s} + \\ + \\For a list of options, see this file: + \\ + \\ https://github.com/oven-sh/webkit/blob/main/Source/JavaScriptCore/runtime/OptionsList.h + \\ + \\Environment variables must be prefixed with "BUN_JSC_". This code runs before .env files are loaded, so those won't work here. + \\ + \\Warning: options change between releases of Bun and WebKit without notice. This is not a stable API, you should not rely on it beyond debugging something, and it may be removed entirely in a future version of Bun. + , + .{name[0..len]}, + ); + bun.Global.exit(1); +} + +const bun = @import("bun"); +const std = @import("std"); diff --git a/src/bun.js/jsc/RefString.zig b/src/bun.js/jsc/RefString.zig new file mode 100644 index 0000000000..fcfbe42834 --- /dev/null +++ b/src/bun.js/jsc/RefString.zig @@ -0,0 +1,60 @@ +//! expensive heap reference-counted string type +//! only use this for big strings +//! like source code +//! not little ones +const RefString = @This(); + +ptr: [*]const u8 = undefined, +len: usize = 0, +hash: Hash = 0, +impl: bun.WTF.StringImpl, + +allocator: std.mem.Allocator, + +ctx: ?*anyopaque = null, +onBeforeDeinit: ?*const Callback = null, + +pub const Hash = u32; +pub const Map = std.HashMap(Hash, *RefString, bun.IdentityContext(Hash), 80); + +pub fn toJS(this: *RefString, global: *jsc.JSGlobalObject) jsc.JSValue { + return bun.String.init(this.impl).toJS(global); +} + +pub const Callback = fn (ctx: *anyopaque, str: *RefString) void; + +pub fn computeHash(input: []const u8) u32 { + return std.hash.XxHash32.hash(0, input); +} + +pub fn slice(this: *RefString) []const u8 { + this.ref(); + + return this.leak(); +} + +pub fn ref(this: *RefString) void { + this.impl.ref(); +} + +pub fn leak(this: RefString) []const u8 { + @setRuntimeSafety(false); + return this.ptr[0..this.len]; +} + +pub fn deref(this: *RefString) void { + this.impl.deref(); +} + +pub fn deinit(this: *RefString) void { + if (this.onBeforeDeinit) |onBeforeDeinit| { + onBeforeDeinit(this.ctx.?, this); + } + + this.allocator.free(this.leak()); + this.allocator.destroy(this); +} + +const bun = @import("bun"); +const jsc = bun.jsc; +const std = @import("std"); diff --git a/src/bun.js/jsc/array_buffer.zig b/src/bun.js/jsc/array_buffer.zig new file mode 100644 index 0000000000..7be8abebee --- /dev/null +++ b/src/bun.js/jsc/array_buffer.zig @@ -0,0 +1,585 @@ +pub const ArrayBuffer = extern struct { + ptr: [*]u8 = &[0]u8{}, + offset: usize = 0, + len: usize = 0, + byte_len: usize = 0, + typed_array_type: JSC.JSValue.JSType = .Cell, + value: JSC.JSValue = JSC.JSValue.zero, + shared: bool = false, + + // require('buffer').kMaxLength. 
+ // keep in sync with Bun::Buffer::kMaxLength + pub const max_size = std.math.maxInt(c_uint); + + extern fn JSBuffer__fromMmap(*JSC.JSGlobalObject, addr: *anyopaque, len: usize) JSC.JSValue; + + // 4 MB or so is pretty good for mmap() + const mmap_threshold = 1024 * 1024 * 4; + + pub fn bytesPerElement(this: *const ArrayBuffer) ?u8 { + return switch (this.typed_array_type) { + .ArrayBuffer, .DataView => null, + .Uint8Array, .Uint8ClampedArray, .Int8Array => 1, + .Uint16Array, .Int16Array, .Float16Array => 2, + .Uint32Array, .Int32Array, .Float32Array => 4, + .BigUint64Array, .BigInt64Array, .Float64Array => 8, + else => null, + }; + } + + /// Only use this when reading from the file descriptor is _very_ cheap. Like, for example, an in-memory file descriptor. + /// Do not use this for pipes, however tempting it may seem. + pub fn toJSBufferFromFd(fd: bun.FileDescriptor, size: usize, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + const buffer_value = Bun__createUint8ArrayForCopy(globalObject, null, size, true); + if (buffer_value == .zero) { + return .zero; + } + + var array_buffer = buffer_value.asArrayBuffer(globalObject) orelse @panic("Unexpected"); + var bytes = array_buffer.byteSlice(); + + buffer_value.ensureStillAlive(); + + var read: isize = 0; + while (bytes.len > 0) { + switch (bun.sys.pread(fd, bytes, read)) { + .result => |amount| { + bytes = bytes[amount..]; + read += @intCast(amount); + + if (amount == 0) { + if (bytes.len > 0) { + @memset(bytes, 0); + } + break; + } + }, + .err => |err| { + return globalObject.throwValue(err.toJSC(globalObject)) catch .zero; + }, + } + } + + buffer_value.ensureStillAlive(); + + return buffer_value; + } + + extern fn ArrayBuffer__fromSharedMemfd(fd: i64, globalObject: *JSC.JSGlobalObject, byte_offset: usize, byte_length: usize, total_size: usize, JSC.JSValue.JSType) JSC.JSValue; + pub const toArrayBufferFromSharedMemfd = ArrayBuffer__fromSharedMemfd; + + pub fn toJSBufferFromMemfd(fd: bun.FileDescriptor, globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { + const stat = switch (bun.sys.fstat(fd)) { + .err => |err| { + fd.close(); + return globalObject.throwValue(err.toJSC(globalObject)); + }, + .result => |fstat| fstat, + }; + + const size = stat.size; + + if (size == 0) { + fd.close(); + return createBuffer(globalObject, ""); + } + + // mmap() is kind of expensive to do + // It creates a new memory mapping. + // If there is a lot of repetitive memory allocations in a tight loop, it performs poorly. + // So we clone it when it's small. 
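+        // For example (illustrative sizes): a 64 KB memfd is copied into a fresh
+        // buffer via toJSBufferFromFd below, while a 16 MB one is mmap()'d with
+        // MAP_SHARED (mmap_threshold above is 4 MB) and wrapped without copying.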
+ if (size < mmap_threshold) { + const result = toJSBufferFromFd(fd, @intCast(size), globalObject); + fd.close(); + return result; + } + + const result = bun.sys.mmap( + null, + @intCast(@max(size, 0)), + std.posix.PROT.READ | std.posix.PROT.WRITE, + .{ .TYPE = .SHARED }, + fd, + 0, + ); + fd.close(); + + switch (result) { + .result => |buf| { + return JSBuffer__fromMmap(globalObject, buf.ptr, buf.len); + }, + .err => |err| { + return globalObject.throwValue(err.toJSC(globalObject)); + }, + } + } + + pub const Strong = struct { + array_buffer: ArrayBuffer, + held: JSC.Strong = .empty, + + pub fn clear(this: *ArrayBuffer.Strong) void { + var ref: *bun.api.napi.Ref = this.ref orelse return; + ref.set(JSC.JSValue.zero); + } + + pub fn slice(this: *const ArrayBuffer.Strong) []u8 { + return this.array_buffer.slice(); + } + + pub fn deinit(this: *ArrayBuffer.Strong) void { + this.held.deinit(); + } + }; + + pub const empty = ArrayBuffer{ .offset = 0, .len = 0, .byte_len = 0, .typed_array_type = .Uint8Array, .ptr = undefined }; + + pub const name = "Bun__ArrayBuffer"; + pub const Stream = std.io.FixedBufferStream([]u8); + + pub inline fn stream(this: ArrayBuffer) Stream { + return Stream{ .pos = 0, .buf = this.slice() }; + } + + // TODO: this can throw an error! should use JSError!JSValue + pub fn create(globalThis: *JSC.JSGlobalObject, bytes: []const u8, comptime kind: JSC.JSValue.JSType) JSC.JSValue { + JSC.markBinding(@src()); + return switch (comptime kind) { + .Uint8Array => Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, false), + .ArrayBuffer => Bun__createArrayBufferForCopy(globalThis, bytes.ptr, bytes.len), + else => @compileError("Not implemented yet"), + }; + } + + pub fn createEmpty(globalThis: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType) JSC.JSValue { + JSC.markBinding(@src()); + + return switch (comptime kind) { + .Uint8Array => Bun__createUint8ArrayForCopy(globalThis, null, 0, false), + .ArrayBuffer => Bun__createArrayBufferForCopy(globalThis, null, 0), + else => @compileError("Not implemented yet"), + }; + } + + pub fn createBuffer(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSC.JSValue { + JSC.markBinding(@src()); + return Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, true); + } + + pub fn createUint8Array(globalThis: *JSC.JSGlobalObject, bytes: []const u8) JSC.JSValue { + JSC.markBinding(@src()); + return Bun__createUint8ArrayForCopy(globalThis, bytes.ptr, bytes.len, false); + } + + extern "c" fn Bun__allocUint8ArrayForCopy(*JSC.JSGlobalObject, usize, **anyopaque) JSC.JSValue; + extern "c" fn Bun__allocArrayBufferForCopy(*JSC.JSGlobalObject, usize, **anyopaque) JSC.JSValue; + + pub fn alloc(global: *JSC.JSGlobalObject, comptime kind: JSC.JSValue.JSType, len: u32) JSError!struct { JSC.JSValue, []u8 } { + var ptr: [*]u8 = undefined; + const buf = switch (comptime kind) { + .Uint8Array => Bun__allocUint8ArrayForCopy(global, len, @ptrCast(&ptr)), + .ArrayBuffer => Bun__allocArrayBufferForCopy(global, len, @ptrCast(&ptr)), + else => @compileError("Not implemented yet"), + }; + if (buf == .zero) { + return error.JSError; + } + return .{ buf, ptr[0..len] }; + } + + extern "c" fn Bun__createUint8ArrayForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize, buffer: bool) JSC.JSValue; + extern "c" fn Bun__createArrayBufferForCopy(*JSC.JSGlobalObject, ptr: ?*const anyopaque, len: usize) JSC.JSValue; + + pub fn fromTypedArray(ctx: *JSC.JSGlobalObject, value: JSC.JSValue) ArrayBuffer { + var out: ArrayBuffer = .{}; + const was = 
value.asArrayBuffer_(ctx, &out); + bun.assert(was); + out.value = value; + return out; + } + + extern "c" fn JSArrayBuffer__fromDefaultAllocator(*JSC.JSGlobalObject, ptr: [*]u8, len: usize) JSC.JSValue; + pub fn toJSFromDefaultAllocator(globalThis: *JSC.JSGlobalObject, bytes: []u8) JSC.JSValue { + return JSArrayBuffer__fromDefaultAllocator(globalThis, bytes.ptr, bytes.len); + } + + pub fn fromDefaultAllocator(globalThis: *JSC.JSGlobalObject, bytes: []u8, comptime typed_array_type: JSC.JSValue.JSType) JSC.JSValue { + return switch (typed_array_type) { + .ArrayBuffer => JSArrayBuffer__fromDefaultAllocator(globalThis, bytes.ptr, bytes.len), + .Uint8Array => JSC.JSUint8Array.fromBytes(globalThis, bytes), + else => @compileError("Not implemented yet"), + }; + } + + pub fn fromBytes(bytes: []u8, typed_array_type: JSC.JSValue.JSType) ArrayBuffer { + return ArrayBuffer{ .offset = 0, .len = @as(u32, @intCast(bytes.len)), .byte_len = @as(u32, @intCast(bytes.len)), .typed_array_type = typed_array_type, .ptr = bytes.ptr }; + } + + pub fn toJSUnchecked(this: ArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) JSC.JSValue { + + // The reason for this is + // JSC C API returns a detached arraybuffer + // if you pass it a zero-length TypedArray + // we don't ever want to send the user a detached arraybuffer + // that's just silly. + if (this.byte_len == 0) { + if (this.typed_array_type == .ArrayBuffer) { + return create(ctx, "", .ArrayBuffer); + } + + if (this.typed_array_type == .Uint8Array) { + return create(ctx, "", .Uint8Array); + } + + // TODO: others + } + + if (this.typed_array_type == .ArrayBuffer) { + return JSC.JSValue.fromRef(JSC.C.JSObjectMakeArrayBufferWithBytesNoCopy( + ctx, + this.ptr, + this.byte_len, + MarkedArrayBuffer_deallocator, + @as(*anyopaque, @ptrFromInt(@intFromPtr(&bun.default_allocator))), + exception, + )); + } + + return JSC.JSValue.fromRef(JSC.C.JSObjectMakeTypedArrayWithBytesNoCopy( + ctx, + this.typed_array_type.toC(), + this.ptr, + this.byte_len, + MarkedArrayBuffer_deallocator, + @as(*anyopaque, @ptrFromInt(@intFromPtr(&bun.default_allocator))), + exception, + )); + } + + const log = Output.scoped(.ArrayBuffer, false); + + pub fn toJS(this: ArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) JSC.JSValue { + if (this.value != .zero) { + return this.value; + } + + // If it's not a mimalloc heap buffer, we're not going to call a deallocator + if (this.len > 0 and !bun.Mimalloc.mi_is_in_heap_region(this.ptr)) { + log("toJS but will never free: {d} bytes", .{this.len}); + + if (this.typed_array_type == .ArrayBuffer) { + return JSC.JSValue.fromRef(JSC.C.JSObjectMakeArrayBufferWithBytesNoCopy( + ctx, + this.ptr, + this.byte_len, + null, + null, + exception, + )); + } + + return JSC.JSValue.fromRef(JSC.C.JSObjectMakeTypedArrayWithBytesNoCopy( + ctx, + this.typed_array_type.toC(), + this.ptr, + this.byte_len, + null, + null, + exception, + )); + } + + return this.toJSUnchecked(ctx, exception); + } + + pub fn toJSWithContext( + this: ArrayBuffer, + ctx: *JSC.JSGlobalObject, + deallocator: ?*anyopaque, + callback: JSC.C.JSTypedArrayBytesDeallocator, + exception: JSC.C.ExceptionRef, + ) JSC.JSValue { + if (this.value != .zero) { + return this.value; + } + + if (this.typed_array_type == .ArrayBuffer) { + return JSC.JSValue.fromRef(JSC.C.JSObjectMakeArrayBufferWithBytesNoCopy( + ctx, + this.ptr, + this.byte_len, + callback, + deallocator, + exception, + )); + } + + return JSC.JSValue.fromRef(JSC.C.JSObjectMakeTypedArrayWithBytesNoCopy( + ctx, + 
this.typed_array_type.toC(), + this.ptr, + this.byte_len, + callback, + deallocator, + exception, + )); + } + + pub const fromArrayBuffer = fromTypedArray; + + /// The equivalent of + /// + /// ```js + /// new ArrayBuffer(view.buffer, view.byteOffset, view.byteLength) + /// ``` + pub inline fn byteSlice(this: *const @This()) []u8 { + return this.ptr[this.offset..][0..this.byte_len]; + } + + /// The equivalent of + /// + /// ```js + /// new ArrayBuffer(view.buffer, view.byteOffset, view.byteLength) + /// ``` + pub const slice = byteSlice; + + pub inline fn asU16(this: *const @This()) []u16 { + return std.mem.bytesAsSlice(u16, @as([*]u16, @ptrCast(@alignCast(this.ptr)))[this.offset..this.byte_len]); + } + + pub inline fn asU16Unaligned(this: *const @This()) []align(1) u16 { + return std.mem.bytesAsSlice(u16, @as([*]align(1) u16, @ptrCast(@alignCast(this.ptr)))[this.offset..this.byte_len]); + } + + pub inline fn asU32(this: *const @This()) []u32 { + return std.mem.bytesAsSlice(u32, @as([*]u32, @ptrCast(@alignCast(this.ptr)))[this.offset..this.byte_len]); + } + + pub const BinaryType = enum(u4) { + Buffer, + ArrayBuffer, + Uint8Array, + Uint16Array, + Uint32Array, + Int8Array, + Int16Array, + Int32Array, + Float16Array, + Float32Array, + Float64Array, + // DataView, + + pub fn toJSType(this: BinaryType) JSC.JSValue.JSType { + return switch (this) { + .ArrayBuffer => .ArrayBuffer, + .Buffer => .Uint8Array, + // .DataView => .DataView, + .Float32Array => .Float32Array, + .Float16Array => .Float16Array, + .Float64Array => .Float64Array, + .Int16Array => .Int16Array, + .Int32Array => .Int32Array, + .Int8Array => .Int8Array, + .Uint16Array => .Uint16Array, + .Uint32Array => .Uint32Array, + .Uint8Array => .Uint8Array, + }; + } + + pub fn toTypedArrayType(this: BinaryType) JSC.C.JSTypedArrayType { + return this.toJSType().toC(); + } + + pub const Map = bun.ComptimeStringMap( + BinaryType, + .{ + .{ "ArrayBuffer", .ArrayBuffer }, + .{ "Buffer", .Buffer }, + // .{ "DataView", .DataView }, + .{ "Float32Array", .Float32Array }, + .{ "Float16Array", .Float16Array }, + .{ "Float64Array", .Float64Array }, + .{ "Int16Array", .Int16Array }, + .{ "Int32Array", .Int32Array }, + .{ "Int8Array", .Int8Array }, + .{ "Uint16Array", .Uint16Array }, + .{ "Uint32Array", .Uint32Array }, + .{ "Uint8Array", .Uint8Array }, + .{ "arraybuffer", .ArrayBuffer }, + .{ "buffer", .Buffer }, + // .{ "dataview", .DataView }, + .{ "float16array", .Float16Array }, + .{ "float32array", .Float32Array }, + .{ "float64array", .Float64Array }, + .{ "int16array", .Int16Array }, + .{ "int32array", .Int32Array }, + .{ "int8array", .Int8Array }, + .{ "nodebuffer", .Buffer }, + .{ "uint16array", .Uint16Array }, + .{ "uint32array", .Uint32Array }, + .{ "uint8array", .Uint8Array }, + }, + ); + + pub fn fromString(input: []const u8) ?BinaryType { + return Map.get(input); + } + + pub fn fromJSValue(globalThis: *JSC.JSGlobalObject, input: JSC.JSValue) bun.JSError!?BinaryType { + if (input.isString()) { + return Map.getWithEql(try input.toBunString(globalThis), bun.String.eqlComptime); + } + + return null; + } + + /// This clones bytes + pub fn toJS(this: BinaryType, bytes: []const u8, globalThis: *JSC.JSGlobalObject) JSC.JSValue { + switch (this) { + .Buffer => return JSC.ArrayBuffer.createBuffer(globalThis, bytes), + .ArrayBuffer => return JSC.ArrayBuffer.create(globalThis, bytes, .ArrayBuffer), + .Uint8Array => return JSC.ArrayBuffer.create(globalThis, bytes, .Uint8Array), + + // These aren't documented, but they are supported + .Uint16Array, 
.Uint32Array, .Int8Array, .Int16Array, .Int32Array, .Float16Array, .Float32Array, .Float64Array => { + const buffer = JSC.ArrayBuffer.create(globalThis, bytes, .ArrayBuffer); + return JSC.JSValue.c(JSC.C.JSObjectMakeTypedArrayWithArrayBuffer(globalThis, this.toTypedArrayType(), buffer.asObjectRef(), null)); + }, + } + } + }; +}; + +pub const MarkedArrayBuffer = struct { + buffer: ArrayBuffer = .{}, + allocator: ?std.mem.Allocator = null, + + pub inline fn stream(this: *MarkedArrayBuffer) ArrayBuffer.Stream { + return this.buffer.stream(); + } + + pub fn fromTypedArray(ctx: *JSC.JSGlobalObject, value: JSC.JSValue) MarkedArrayBuffer { + return MarkedArrayBuffer{ + .allocator = null, + .buffer = ArrayBuffer.fromTypedArray(ctx, value), + }; + } + + pub fn fromArrayBuffer(ctx: *JSC.JSGlobalObject, value: JSC.JSValue) MarkedArrayBuffer { + return MarkedArrayBuffer{ + .allocator = null, + .buffer = ArrayBuffer.fromArrayBuffer(ctx, value), + }; + } + + pub fn fromString(str: []const u8, allocator: std.mem.Allocator) !MarkedArrayBuffer { + const buf = try allocator.dupe(u8, str); + return MarkedArrayBuffer.fromBytes(buf, allocator, JSC.JSValue.JSType.Uint8Array); + } + + pub fn fromJS(global: *JSC.JSGlobalObject, value: JSC.JSValue) ?MarkedArrayBuffer { + const array_buffer = value.asArrayBuffer(global) orelse return null; + return MarkedArrayBuffer{ .buffer = array_buffer, .allocator = null }; + } + + pub fn fromBytes(bytes: []u8, allocator: std.mem.Allocator, typed_array_type: JSC.JSValue.JSType) MarkedArrayBuffer { + return MarkedArrayBuffer{ + .buffer = ArrayBuffer.fromBytes(bytes, typed_array_type), + .allocator = allocator, + }; + } + + pub const empty = MarkedArrayBuffer{ + .allocator = null, + .buffer = ArrayBuffer.empty, + }; + + pub inline fn slice(this: *const @This()) []u8 { + return this.buffer.byteSlice(); + } + + pub fn destroy(this: *MarkedArrayBuffer) void { + const content = this.*; + if (this.allocator) |allocator| { + this.allocator = null; + allocator.free(content.buffer.slice()); + allocator.destroy(this); + } + } + + pub fn init(allocator: std.mem.Allocator, size: u32, typed_array_type: JSC.JSValue.JSType) !*MarkedArrayBuffer { + const bytes = try allocator.alloc(u8, size); + const container = try allocator.create(MarkedArrayBuffer); + container.* = MarkedArrayBuffer.fromBytes(bytes, allocator, typed_array_type); + return container; + } + + pub fn toNodeBuffer(this: *const MarkedArrayBuffer, ctx: *JSC.JSGlobalObject) JSC.JSValue { + return JSC.JSValue.createBufferWithCtx(ctx, this.buffer.byteSlice(), this.buffer.ptr, MarkedArrayBuffer_deallocator); + } + + pub fn toJSObjectRef(this: *const MarkedArrayBuffer, ctx: *JSC.JSGlobalObject, exception: JSC.C.ExceptionRef) bun.JSC.C.JSObjectRef { + if (!this.buffer.value.isEmptyOrUndefinedOrNull()) { + return this.buffer.value.asObjectRef(); + } + if (this.buffer.byte_len == 0) { + return JSC.C.JSObjectMakeTypedArray( + ctx, + this.buffer.typed_array_type.toC(), + 0, + exception, + ); + } + + return JSC.C.JSObjectMakeTypedArrayWithBytesNoCopy( + ctx, + this.buffer.typed_array_type.toC(), + this.buffer.ptr, + + this.buffer.byte_len, + MarkedArrayBuffer_deallocator, + this.buffer.ptr, + exception, + ); + } + + // TODO: refactor this + pub fn toJS(this: *const MarkedArrayBuffer, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + var exception = [_]JSC.C.JSValueRef{null}; + const obj = this.toJSObjectRef(globalObject, &exception); + + if (exception[0] != null) { + return globalObject.throwValue(JSC.JSValue.c(exception[0])) catch return 
.zero; + } + + return JSC.JSValue.c(obj); + } +}; + +pub export fn MarkedArrayBuffer_deallocator(bytes_: *anyopaque, _: *anyopaque) void { + const mimalloc = bun.Mimalloc; + // zig's memory allocator interface won't work here + // mimalloc knows the size of things + // but we don't + // if (comptime Environment.allow_assert) { + // bun.assert(mimalloc.mi_check_owned(bytes_) or + // mimalloc.mi_heap_check_owned(JSC.VirtualMachine.get().arena.heap.?, bytes_)); + // } + + mimalloc.mi_free(bytes_); +} + +pub export fn BlobArrayBuffer_deallocator(_: *anyopaque, blob: *anyopaque) void { + // zig's memory allocator interface won't work here + // mimalloc knows the size of things + // but we don't + var store = bun.cast(*JSC.WebCore.Blob.Store, blob); + store.deref(); +} + +const bun = @import("bun"); +const JSC = bun.jsc; +const Output = bun.Output; +const JSError = bun.JSError; + +const std = @import("std"); diff --git a/src/bun.js/jsc/dom_call.zig b/src/bun.js/jsc/dom_call.zig new file mode 100644 index 0000000000..e69de29bb2 diff --git a/src/bun.js/jsc/host_fn.zig b/src/bun.js/jsc/host_fn.zig new file mode 100644 index 0000000000..bda2ea846b --- /dev/null +++ b/src/bun.js/jsc/host_fn.zig @@ -0,0 +1,712 @@ +/// A host function is the native function pointer type that can be used by a +/// JSC::JSFunction to call native code from JavaScript. +pub const JSHostFn = fn (*JSGlobalObject, *CallFrame) callconv(jsc.conv) JSValue; +/// To allow usage of `try` for error handling, Bun provides `toJSHostFn` to +/// wrap this type into a JSHostFn. +pub const JSHostFnZig = fn (*JSGlobalObject, *CallFrame) bun.JSError!JSValue; + +pub fn JSHostFnZigWithContext(comptime ContextType: type) type { + return fn (*ContextType, *JSGlobalObject, *CallFrame) bun.JSError!JSValue; +} + +pub fn JSHostFunctionTypeWithContext(comptime ContextType: type) type { + return fn (*ContextType, *JSGlobalObject, *CallFrame) callconv(jsc.conv) JSValue; +} + +pub fn toJSHostFn(comptime functionToWrap: JSHostFnZig) JSHostFn { + return struct { + pub fn function(globalThis: *JSGlobalObject, callframe: *CallFrame) callconv(jsc.conv) JSValue { + if (bun.Environment.allow_assert and bun.Environment.is_canary) { + const value = functionToWrap(globalThis, callframe) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + if (comptime bun.Environment.isDebug) { + if (value != .zero) { + if (globalThis.hasException()) { + var formatter = jsc.ConsoleObject.Formatter{ .globalThis = globalThis }; + defer formatter.deinit(); + bun.Output.err("Assertion failed", + \\Native function returned a non-zero JSValue while an exception is pending + \\ + \\ fn: {s} + \\ value: {} + \\ + , .{ + &functionToWrap, // use `(lldb) image lookup --address 0x1ec4` to discover what function failed + value.toFmt(&formatter), + }); + bun.Output.flush(); + } + } + } + bun.assert((value == .zero) == globalThis.hasException()); + return value; + } + return @call(.always_inline, functionToWrap, .{ globalThis, callframe }) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + } + }.function; +} + +pub fn toJSHostFnWithContext(comptime ContextType: type, comptime Function: JSHostFnZigWithContext(ContextType)) JSHostFunctionTypeWithContext(ContextType) { + return struct { + pub fn function(ctx: *ContextType, globalThis: *JSGlobalObject, callframe: *CallFrame) callconv(jsc.conv) JSValue { + if (bun.Environment.allow_assert and 
bun.Environment.is_canary) { + const value = Function(ctx, globalThis, callframe) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + if (comptime bun.Environment.isDebug) { + if (value != .zero) { + if (globalThis.hasException()) { + var formatter = jsc.ConsoleObject.Formatter{ .globalThis = globalThis }; + defer formatter.deinit(); + bun.Output.err("Assertion failed", + \\Native function returned a non-zero JSValue while an exception is pending + \\ + \\ fn: {s} + \\ value: {} + \\ + , .{ + &Function, // use `(lldb) image lookup --address 0x1ec4` to discover what function failed + value.toFmt(&formatter), + }); + bun.Output.flush(); + } + } + } + bun.assert((value == .zero) == globalThis.hasException()); + return value; + } + return @call(.always_inline, Function, .{ ctx, globalThis, callframe }) catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + } + }.function; +} + +pub fn toJSHostValue(globalThis: *JSGlobalObject, value: error{ OutOfMemory, JSError }!JSValue) JSValue { + if (bun.Environment.allow_assert and bun.Environment.is_canary) { + const normal = value catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; + bun.assert((normal == .zero) == globalThis.hasException()); + return normal; + } + return value catch |err| switch (err) { + error.JSError => .zero, + error.OutOfMemory => globalThis.throwOutOfMemoryValue(), + }; +} + +const ParsedHostFunctionErrorSet = struct { + OutOfMemory: bool = false, + JSError: bool = false, +}; + +inline fn parseErrorSet(T: type, errors: []const std.builtin.Type.Error) ParsedHostFunctionErrorSet { + return comptime brk: { + var errs: ParsedHostFunctionErrorSet = .{}; + for (errors) |err| { + if (!@hasField(ParsedHostFunctionErrorSet, err.name)) { + @compileError("Return value from host function '" ++ @typeInfo(T) ++ "' can not contain error '" ++ err.name ++ "'"); + } + @field(errs, err.name) = true; + } + break :brk errs; + }; +} + +const DeinitFunction = *const fn (ctx: *anyopaque, buffer: [*]u8, len: usize) callconv(.C) void; + +const private = struct { + pub extern fn Bun__CreateFFIFunctionWithDataValue( + *JSGlobalObject, + ?*const ZigString, + argCount: u32, + function: *const JSHostFn, + strong: bool, + data: *anyopaque, + ) JSValue; + pub extern fn Bun__CreateFFIFunction( + globalObject: *JSGlobalObject, + symbolName: ?*const ZigString, + argCount: u32, + function: *const JSHostFn, + strong: bool, + ) *anyopaque; + + pub extern fn Bun__CreateFFIFunctionValue( + globalObject: *JSGlobalObject, + symbolName: ?*const ZigString, + argCount: u32, + function: *const JSHostFn, + strong: bool, + add_ptr_field: bool, + inputFunctionPtr: ?*anyopaque, + ) JSValue; + + pub extern fn Bun__untrackFFIFunction( + globalObject: *JSGlobalObject, + function: JSValue, + ) bool; + + pub extern fn Bun__FFIFunction_getDataPtr(JSValue) ?*anyopaque; + pub extern fn Bun__FFIFunction_setDataPtr(JSValue, ?*anyopaque) void; +}; + +pub fn NewFunction( + globalObject: *JSGlobalObject, + symbolName: ?*const ZigString, + argCount: u32, + comptime function: anytype, + strong: bool, +) JSValue { + if (@TypeOf(function) == JSHostFn) { + return NewRuntimeFunction(globalObject, symbolName, argCount, function, strong, false, null); + } + return NewRuntimeFunction(globalObject, symbolName, argCount, toJSHostFn(function), strong, false, null); +} + +pub fn createCallback( + globalObject: 
*JSGlobalObject, + symbolName: ?*const ZigString, + argCount: u32, + comptime function: anytype, +) JSValue { + if (@TypeOf(function) == JSHostFn) { + return NewRuntimeFunction(globalObject, symbolName, argCount, function, false, false, null); + } + return NewRuntimeFunction(globalObject, symbolName, argCount, toJSHostFn(function), false, false, null); +} + +pub fn NewRuntimeFunction( + globalObject: *JSGlobalObject, + symbolName: ?*const ZigString, + argCount: u32, + functionPointer: *const JSHostFn, + strong: bool, + add_ptr_property: bool, + inputFunctionPtr: ?*anyopaque, +) JSValue { + jsc.markBinding(@src()); + return private.Bun__CreateFFIFunctionValue(globalObject, symbolName, argCount, functionPointer, strong, add_ptr_property, inputFunctionPtr); +} + +pub fn getFunctionData(function: JSValue) ?*anyopaque { + jsc.markBinding(@src()); + return private.Bun__FFIFunction_getDataPtr(function); +} + +pub fn setFunctionData(function: JSValue, value: ?*anyopaque) void { + jsc.markBinding(@src()); + return private.Bun__FFIFunction_setDataPtr(function, value); +} + +pub fn NewFunctionWithData( + globalObject: *JSGlobalObject, + symbolName: ?*const ZigString, + argCount: u32, + comptime function: JSHostFnZig, + strong: bool, + data: *anyopaque, +) JSValue { + jsc.markBinding(@src()); + return private.Bun__CreateFFIFunctionWithDataValue( + globalObject, + symbolName, + argCount, + toJSHostFn(function), + strong, + data, + ); +} + +pub fn untrackFunction( + globalObject: *JSGlobalObject, + value: JSValue, +) bool { + jsc.markBinding(@src()); + return private.Bun__untrackFFIFunction(globalObject, value); +} + +pub const DOMEffect = struct { + reads: [4]ID = std.mem.zeroes([4]ID), + writes: [4]ID = std.mem.zeroes([4]ID), + + pub const top = DOMEffect{ + .reads = .{ ID.Heap, ID.Heap, ID.Heap, ID.Heap }, + .writes = .{ ID.Heap, ID.Heap, ID.Heap, ID.Heap }, + }; + + pub fn forRead(read: ID) DOMEffect { + return DOMEffect{ + .reads = .{ read, ID.Heap, ID.Heap, ID.Heap }, + .writes = .{ ID.Heap, ID.Heap, ID.Heap, ID.Heap }, + }; + } + + pub fn forWrite(read: ID) DOMEffect { + return DOMEffect{ + .writes = .{ read, ID.Heap, ID.Heap, ID.Heap }, + .reads = .{ ID.Heap, ID.Heap, ID.Heap, ID.Heap }, + }; + } + + pub const pure = DOMEffect{}; + + pub fn isPure(this: DOMEffect) bool { + return this.reads[0] == ID.InvalidAbstractHeap and this.writes[0] == ID.InvalidAbstractHeap; + } + + pub const ID = enum(u8) { + InvalidAbstractHeap = 0, + World, + Stack, + Heap, + Butterfly_publicLength, + Butterfly_vectorLength, + GetterSetter_getter, + GetterSetter_setter, + JSCell_cellState, + JSCell_indexingType, + JSCell_structureID, + JSCell_typeInfoFlags, + JSObject_butterfly, + JSPropertyNameEnumerator_cachedPropertyNames, + RegExpObject_lastIndex, + NamedProperties, + IndexedInt32Properties, + IndexedDoubleProperties, + IndexedContiguousProperties, + IndexedArrayStorageProperties, + DirectArgumentsProperties, + ScopeProperties, + TypedArrayProperties, + /// Used to reflect the fact that some allocations reveal object identity */ + HeapObjectCount, + RegExpState, + MathDotRandomState, + JSDateFields, + JSMapFields, + JSSetFields, + JSWeakMapFields, + WeakSetFields, + JSInternalFields, + InternalState, + CatchLocals, + Absolute, + /// DOMJIT tells the heap range with the pair of integers. */ + DOMState, + /// Use this for writes only, to indicate that this may fire watchpoints. 
Usually this is never directly written but instead we test to see if a node clobbers this; it just so happens that you have to write world to clobber it. */ + Watchpoint_fire, + /// Use these for reads only, just to indicate that if the world got clobbered, then this operation will not work. */ + MiscFields, + /// Use this for writes only, just to indicate that hoisting the node is invalid. This works because we don't hoist anything that has any side effects at all. */ + SideState, + }; +}; + +fn DOMCallArgumentType(comptime Type: type) []const u8 { + const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; + return switch (ChildType) { + i8, u8, i16, u16, i32 => "JSC::SpecInt32Only", + u32, i64, u64 => "JSC::SpecInt52Any", + f64 => "JSC::SpecDoubleReal", + bool => "JSC::SpecBoolean", + jsc.JSString => "JSC::SpecString", + jsc.JSUint8Array => "JSC::SpecUint8Array", + else => @compileError("Unknown DOM type: " ++ @typeName(Type)), + }; +} + +fn DOMCallArgumentTypeWrapper(comptime Type: type) []const u8 { + const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; + return switch (ChildType) { + i32 => "int32_t", + f64 => "double", + u64 => "uint64_t", + i64 => "int64_t", + bool => "bool", + jsc.JSString => "JSC::JSString*", + jsc.JSUint8Array => "JSC::JSUint8Array*", + else => @compileError("Unknown DOM type: " ++ @typeName(Type)), + }; +} + +fn DOMCallResultType(comptime Type: type) []const u8 { + const ChildType = if (@typeInfo(Type) == .pointer) std.meta.Child(Type) else Type; + return switch (ChildType) { + i32 => "JSC::SpecInt32Only", + bool => "JSC::SpecBoolean", + jsc.JSString => "JSC::SpecString", + jsc.JSUint8Array => "JSC::SpecUint8Array", + jsc.JSCell => "JSC::SpecCell", + u52, i52 => "JSC::SpecInt52Any", + f64 => "JSC::SpecDoubleReal", + else => "JSC::SpecHeapTop", + }; +} + +pub fn DOMCall( + comptime class_name: string, + comptime Container: type, + comptime functionName: string, + comptime dom_effect: DOMEffect, +) type { + return extern struct { + const className = class_name; + pub const is_dom_call = true; + const Slowpath = @field(Container, functionName); + const SlowpathType = @TypeOf(@field(Container, functionName)); + + // Zig doesn't support @frameAddress(1) + // so we have to add a small wrapper fujnction + pub fn slowpath( + globalObject: *jsc.JSGlobalObject, + thisValue: jsc.JSValue, + arguments_ptr: [*]const jsc.JSValue, + arguments_len: usize, + ) callconv(jsc.conv) jsc.JSValue { + return jsc.toJSHostValue(globalObject, @field(Container, functionName)(globalObject, thisValue, arguments_ptr[0..arguments_len])); + } + + pub const fastpath = @field(Container, functionName ++ "WithoutTypeChecks"); + pub const Fastpath = @TypeOf(fastpath); + pub const Arguments = std.meta.ArgsTuple(Fastpath); + const PutFnType = *const fn (globalObject: *jsc.JSGlobalObject, value: jsc.JSValue) callconv(.c) void; + const put_fn = @extern(PutFnType, .{ .name = className ++ "__" ++ functionName ++ "__put" }); + + pub fn put(globalObject: *jsc.JSGlobalObject, value: jsc.JSValue) void { + put_fn(globalObject, value); + } + + pub const effect = dom_effect; + + comptime { + @export(&slowpath, .{ .name = className ++ "__" ++ functionName ++ "__slowpath" }); + @export(&fastpath, .{ .name = className ++ "__" ++ functionName ++ "__fastpath" }); + } + }; +} + +pub fn InstanceMethodType(comptime Container: type) type { + return fn (instance: *Container, globalThis: *jsc.JSGlobalObject, callframe: *jsc.CallFrame) bun.JSError!jsc.JSValue; +} + +pub 
fn wrapInstanceMethod( + comptime Container: type, + comptime name: string, + comptime auto_protect: bool, +) InstanceMethodType(Container) { + return struct { + const FunctionType = @TypeOf(@field(Container, name)); + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).@"fn"; + const Args = std.meta.ArgsTuple(FunctionType); + const eater = if (auto_protect) jsc.CallFrame.ArgumentsSlice.protectEatNext else jsc.CallFrame.ArgumentsSlice.nextEat; + + pub fn method( + this: *Container, + globalThis: *jsc.JSGlobalObject, + callframe: *jsc.CallFrame, + ) bun.JSError!jsc.JSValue { + const arguments = callframe.arguments_old(FunctionTypeInfo.params.len); + var iter = jsc.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); + var args: Args = undefined; + + const has_exception_ref: bool = comptime brk: { + for (FunctionTypeInfo.params) |param| { + if (param.type.? == jsc.C.ExceptionRef) { + break :brk true; + } + } + + break :brk false; + }; + var exception_value = [_]jsc.C.JSValueRef{null}; + const exception: jsc.C.ExceptionRef = if (comptime has_exception_ref) &exception_value else undefined; + + inline for (FunctionTypeInfo.params, 0..) |param, i| { + const ArgType = param.type.?; + switch (ArgType) { + *Container => { + args[i] = this; + }, + *jsc.JSGlobalObject => { + args[i] = globalThis; + }, + *jsc.CallFrame => { + args[i] = callframe; + }, + jsc.Node.StringOrBuffer => { + const arg = iter.nextEat() orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected string or buffer", .{}); + }; + args[i] = try jsc.Node.StringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected string or buffer", .{}); + }; + }, + ?jsc.Node.StringOrBuffer => { + if (iter.nextEat()) |arg| { + if (!arg.isEmptyOrUndefinedOrNull()) { + args[i] = try jsc.Node.StringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected string or buffer", .{}); + }; + } else { + args[i] = null; + } + } else { + args[i] = null; + } + }, + jsc.ArrayBuffer => { + if (iter.nextEat()) |arg| { + args[i] = arg.asArrayBuffer(globalThis) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected TypedArray", .{}); + }; + } else { + iter.deinit(); + return globalThis.throwInvalidArguments("expected TypedArray", .{}); + } + }, + ?jsc.ArrayBuffer => { + if (iter.nextEat()) |arg| { + args[i] = arg.asArrayBuffer(globalThis) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected TypedArray", .{}); + }; + } else { + args[i] = null; + } + }, + jsc.ZigString => { + var string_value = eater(&iter) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Missing argument", .{}); + }; + + if (string_value.isUndefinedOrNull()) { + iter.deinit(); + return globalThis.throwInvalidArguments("Expected string", .{}); + } + + args[i] = try string_value.getZigString(globalThis); + }, + ?jsc.Cloudflare.ContentOptions => { + if (iter.nextEat()) |content_arg| { + if (try content_arg.get(globalThis, "html")) |html_val| { + args[i] = .{ .html = html_val.toBoolean() }; + } + } else { + args[i] = null; + } + }, + *jsc.WebCore.Response => { + args[i] = (eater(&iter) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Missing Response object", .{}); + }).as(jsc.WebCore.Response) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Expected Response object", .{}); + }; + }, + 
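+                    // Hedged illustration (hypothetical method name, not defined in this patch):
+                    // each prong in this switch maps one declared Zig parameter type to a value
+                    // taken from the JS call. A container method declared as
+                    //   pub fn upgrade(this: *Container, globalThis: *jsc.JSGlobalObject, req: *jsc.WebCore.Request) bun.JSError!jsc.JSValue
+                    // has `this` and `globalThis` injected directly by the prongs above, while
+                    // `req` is pulled from the CallFrame arguments and cast by the
+                    // *jsc.WebCore.Request prong below.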
*jsc.WebCore.Request => { + args[i] = (eater(&iter) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Missing Request object", .{}); + }).as(jsc.WebCore.Request) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Expected Request object", .{}); + }; + }, + jsc.JSValue => { + const val = eater(&iter) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Missing argument", .{}); + }; + args[i] = val; + }, + ?jsc.JSValue => { + args[i] = eater(&iter); + }, + jsc.C.ExceptionRef => { + args[i] = exception; + }, + else => @compileError("Unexpected Type " ++ @typeName(ArgType)), + } + } + + defer iter.deinit(); + + defer { + if (comptime has_exception_ref) { + if (exception_value[0] != null) { + globalThis.throwValue(exception_value[0].?.value()); + } + } + } + + return @call(.always_inline, @field(Container, name), args); + } + }.method; +} + +pub fn wrapStaticMethod( + comptime Container: type, + comptime name: string, + comptime auto_protect: bool, +) jsc.JSHostFnZig { + return struct { + const FunctionType = @TypeOf(@field(Container, name)); + const FunctionTypeInfo: std.builtin.Type.Fn = @typeInfo(FunctionType).@"fn"; + const Args = std.meta.ArgsTuple(FunctionType); + const eater = if (auto_protect) jsc.CallFrame.ArgumentsSlice.protectEatNext else jsc.CallFrame.ArgumentsSlice.nextEat; + + pub fn method( + globalThis: *jsc.JSGlobalObject, + callframe: *jsc.CallFrame, + ) bun.JSError!jsc.JSValue { + const arguments = callframe.arguments_old(FunctionTypeInfo.params.len); + var iter = jsc.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments.slice()); + var args: Args = undefined; + + inline for (FunctionTypeInfo.params, 0..) |param, i| { + const ArgType = param.type.?; + switch (param.type.?) { + *jsc.JSGlobalObject => { + args[i] = globalThis; + }, + jsc.Node.StringOrBuffer => { + const arg = iter.nextEat() orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected string or buffer", .{}); + }; + args[i] = try jsc.Node.StringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected string or buffer", .{}); + }; + }, + ?jsc.Node.StringOrBuffer => { + if (iter.nextEat()) |arg| { + args[i] = try jsc.Node.StringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse brk: { + if (arg == .undefined) { + break :brk null; + } + + iter.deinit(); + return globalThis.throwInvalidArguments("expected string or buffer", .{}); + }; + } else { + args[i] = null; + } + }, + jsc.Node.BlobOrStringOrBuffer => { + if (iter.nextEat()) |arg| { + args[i] = try jsc.Node.BlobOrStringOrBuffer.fromJS(globalThis, iter.arena.allocator(), arg) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected blob, string or buffer", .{}); + }; + } else { + iter.deinit(); + return globalThis.throwInvalidArguments("expected blob, string or buffer", .{}); + } + }, + jsc.ArrayBuffer => { + if (iter.nextEat()) |arg| { + args[i] = arg.asArrayBuffer(globalThis) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected TypedArray", .{}); + }; + } else { + iter.deinit(); + return globalThis.throwInvalidArguments("expected TypedArray", .{}); + } + }, + ?jsc.ArrayBuffer => { + if (iter.nextEat()) |arg| { + args[i] = arg.asArrayBuffer(globalThis) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("expected TypedArray", .{}); + }; + } else { + args[i] = null; + } + }, + jsc.ZigString => { + var string_value = eater(&iter) orelse { 
+ iter.deinit(); + return globalThis.throwInvalidArguments("Missing argument", .{}); + }; + + if (string_value.isUndefinedOrNull()) { + iter.deinit(); + return globalThis.throwInvalidArguments("Expected string", .{}); + } + + args[i] = try string_value.getZigString(globalThis); + }, + ?jsc.Cloudflare.ContentOptions => { + if (iter.nextEat()) |content_arg| { + if (try content_arg.get(globalThis, "html")) |html_val| { + args[i] = .{ .html = html_val.toBoolean() }; + } + } else { + args[i] = null; + } + }, + *jsc.WebCore.Response => { + args[i] = (eater(&iter) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Missing Response object", .{}); + }).as(jsc.WebCore.Response) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Expected Response object", .{}); + }; + }, + *jsc.WebCore.Request => { + args[i] = (eater(&iter) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Missing Request object", .{}); + }).as(jsc.WebCore.Request) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Expected Request object", .{}); + }; + }, + jsc.JSValue => { + const val = eater(&iter) orelse { + iter.deinit(); + return globalThis.throwInvalidArguments("Missing argument", .{}); + }; + args[i] = val; + }, + ?jsc.JSValue => { + args[i] = eater(&iter); + }, + else => @compileError(std.fmt.comptimePrint("Unexpected Type " ++ @typeName(ArgType) ++ " at argument {d} in {s}#{s}", .{ i, @typeName(Container), name })), + } + } + + defer iter.deinit(); + + return @call(.always_inline, @field(Container, name), args); + } + }.method; +} + +const bun = @import("bun"); +const jsc = bun.jsc; +const JSValue = jsc.JSValue; +const JSGlobalObject = jsc.JSGlobalObject; +const CallFrame = jsc.CallFrame; +const ZigString = jsc.ZigString; +const std = @import("std"); +const string = []const u8; diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig deleted file mode 100644 index de45ad525b..0000000000 --- a/src/bun.js/module_loader.zig +++ /dev/null @@ -1,3080 +0,0 @@ -const std = @import("std"); -const StaticExport = @import("./bindings/static_export.zig"); -const bun = @import("bun"); -const string = bun.string; -const Output = bun.Output; -const Global = bun.Global; -const Environment = bun.Environment; -const strings = bun.strings; -const MutableString = bun.MutableString; -const stringZ = bun.stringZ; -const StoredFileDescriptorType = bun.StoredFileDescriptorType; -const Arena = @import("../allocators/mimalloc_arena.zig").Arena; -const C = bun.C; - -const Allocator = std.mem.Allocator; -const IdentityContext = @import("../identity_context.zig").IdentityContext; -const Fs = @import("../fs.zig"); -const Resolver = @import("../resolver/resolver.zig"); -const ast = @import("../import_record.zig"); -const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint; -const ParseResult = bun.transpiler.ParseResult; -const logger = bun.logger; -const Api = @import("../api/schema.zig").Api; -const options = @import("../options.zig"); -const Transpiler = bun.Transpiler; -const PluginRunner = bun.transpiler.PluginRunner; -const js_printer = bun.js_printer; -const js_parser = bun.js_parser; -const js_ast = bun.JSAst; -const NodeFallbackModules = @import("../node_fallbacks.zig"); -const ImportKind = ast.ImportKind; -const Analytics = @import("../analytics/analytics_thread.zig"); -const ZigString = bun.JSC.ZigString; -const Runtime = @import("../runtime.zig"); -const Router = @import("./api/filesystem_router.zig"); -const ImportRecord = ast.ImportRecord; 
-const DotEnv = @import("../env_loader.zig"); -const PackageJSON = @import("../resolver/package_json.zig").PackageJSON; -const MacroRemap = @import("../resolver/package_json.zig").MacroMap; -const JSC = bun.JSC; -const MarkedArrayBuffer = @import("./base.zig").MarkedArrayBuffer; -const getAllocator = @import("./base.zig").getAllocator; -const JSValue = bun.JSC.JSValue; -const node_module_module = @import("./bindings/NodeModuleModule.zig"); - -const JSGlobalObject = bun.JSC.JSGlobalObject; -const ConsoleObject = bun.JSC.ConsoleObject; -const ZigException = bun.JSC.ZigException; -const ZigStackTrace = bun.JSC.ZigStackTrace; -const ResolvedSource = bun.JSC.ResolvedSource; -const JSPromise = bun.JSC.JSPromise; -const JSModuleLoader = bun.JSC.JSModuleLoader; -const JSPromiseRejectionOperation = bun.JSC.JSPromiseRejectionOperation; -const ErrorableZigString = bun.JSC.ErrorableZigString; -const VM = bun.JSC.VM; -const JSFunction = bun.JSC.JSFunction; -const Config = @import("./config.zig"); -const URL = @import("../url.zig").URL; -const Bun = JSC.API.Bun; -const EventLoop = JSC.EventLoop; -const PendingResolution = @import("../resolver/resolver.zig").PendingResolution; -const ThreadSafeFunction = JSC.napi.ThreadSafeFunction; -const PackageManager = @import("../install/install.zig").PackageManager; -const Install = @import("../install/install.zig"); -const VirtualMachine = bun.JSC.VirtualMachine; -const Dependency = @import("../install/dependency.zig"); -const Async = bun.Async; -const String = bun.String; -const ModuleType = options.ModuleType; - -const debug = Output.scoped(.ModuleLoader, true); -const panic = std.debug.panic; - -inline fn jsSyntheticModule(name: ResolvedSource.Tag, specifier: String) ResolvedSource { - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.empty, - .specifier = specifier, - .source_url = bun.String.static(@tagName(name)), - .tag = name, - .source_code_needs_deref = false, - }; -} - -/// Dumps the module source to a file in /tmp/bun-debug-src/{filepath} -/// -/// This can technically fail if concurrent access across processes happens, or permission issues. -/// Errors here should always be ignored. -fn dumpSource(vm: *VirtualMachine, specifier: string, printer: anytype) void { - dumpSourceString(vm, specifier, printer.ctx.getWritten()); -} - -fn dumpSourceString(vm: *VirtualMachine, specifier: string, written: []const u8) void { - dumpSourceStringFailiable(vm, specifier, written) catch |e| { - Output.debugWarn("Failed to dump source string: {}", .{e}); - }; -} - -fn dumpSourceStringFailiable(vm: *VirtualMachine, specifier: string, written: []const u8) !void { - if (!Environment.isDebug) return; - if (bun.getRuntimeFeatureFlag("BUN_DEBUG_NO_DUMP")) return; - - const BunDebugHolder = struct { - pub var dir: ?std.fs.Dir = null; - pub var lock: bun.Mutex = .{}; - }; - - BunDebugHolder.lock.lock(); - defer BunDebugHolder.lock.unlock(); - - const dir = BunDebugHolder.dir orelse dir: { - const base_name = switch (Environment.os) { - else => "/tmp/bun-debug-src/", - .windows => brk: { - const temp = bun.fs.FileSystem.RealFS.platformTempDir(); - var win_temp_buffer: bun.PathBuffer = undefined; - @memcpy(win_temp_buffer[0..temp.len], temp); - const suffix = "\\bun-debug-src"; - @memcpy(win_temp_buffer[temp.len .. temp.len + suffix.len], suffix); - win_temp_buffer[temp.len + suffix.len] = 0; - break :brk win_temp_buffer[0 .. 
temp.len + suffix.len :0]; - }, - }; - const dir = try std.fs.cwd().makeOpenPath(base_name, .{}); - BunDebugHolder.dir = dir; - break :dir dir; - }; - - if (std.fs.path.dirname(specifier)) |dir_path| { - const root_len = switch (Environment.os) { - else => "/".len, - .windows => bun.path.windowsFilesystemRoot(dir_path).len, - }; - var parent = try dir.makeOpenPath(dir_path[root_len..], .{}); - defer parent.close(); - parent.writeFile(.{ - .sub_path = std.fs.path.basename(specifier), - .data = written, - }) catch |e| { - Output.debugWarn("Failed to dump source string: writeFile {}", .{e}); - return; - }; - if (vm.source_mappings.get(specifier)) |mappings| { - defer mappings.deref(); - const map_path = std.mem.concat(bun.default_allocator, u8, &.{ std.fs.path.basename(specifier), ".map" }) catch bun.outOfMemory(); - defer bun.default_allocator.free(map_path); - const file = try parent.createFile(map_path, .{}); - defer file.close(); - - const source_file = parent.readFileAlloc( - bun.default_allocator, - specifier, - std.math.maxInt(u64), - ) catch ""; - defer bun.default_allocator.free(source_file); - - var bufw = std.io.bufferedWriter(file.writer()); - const w = bufw.writer(); - try w.print( - \\{{ - \\ "version": 3, - \\ "file": {}, - \\ "sourceRoot": "", - \\ "sources": [{}], - \\ "sourcesContent": [{}], - \\ "names": [], - \\ "mappings": "{}" - \\}} - , .{ - bun.fmt.formatJSONStringUTF8(std.fs.path.basename(specifier), .{}), - bun.fmt.formatJSONStringUTF8(specifier, .{}), - bun.fmt.formatJSONStringUTF8(source_file, .{}), - mappings.formatVLQs(), - }); - try bufw.flush(); - } - } else { - dir.writeFile(.{ - .sub_path = std.fs.path.basename(specifier), - .data = written, - }) catch return; - } -} - -fn setBreakPointOnFirstLine() bool { - const s = struct { - var set_break_point: bool = true; - }; - const ret = s.set_break_point; - s.set_break_point = false; - return ret; -} - -pub const RuntimeTranspilerStore = struct { - generation_number: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - store: TranspilerJob.Store, - enabled: bool = true, - queue: Queue = Queue{}, - - pub const Queue = bun.UnboundedQueue(TranspilerJob, .next); - - pub fn init() RuntimeTranspilerStore { - return RuntimeTranspilerStore{ - .store = TranspilerJob.Store.init(bun.typedAllocator(TranspilerJob)), - }; - } - - // This is run at the top of the event loop on the JS thread. - pub fn drain(this: *RuntimeTranspilerStore) void { - var batch = this.queue.popBatch(); - var iter = batch.iterator(); - if (iter.next()) |job| { - // we run just one job first to see if there are more - job.runFromJSThread(); - } else { - return; - } - var vm: *VirtualMachine = @fieldParentPtr("transpiler_store", this); - const event_loop = vm.eventLoop(); - const global = vm.global; - const jsc_vm = vm.jsc; - while (iter.next()) |job| { - // if there are more, we need to drain the microtasks from the previous run - event_loop.drainMicrotasksWithGlobal(global, jsc_vm); - job.runFromJSThread(); - } - - // immediately after this is called, the microtasks will be drained again. 
- } - - pub fn transpile( - this: *RuntimeTranspilerStore, - vm: *VirtualMachine, - globalObject: *JSGlobalObject, - input_specifier: bun.String, - path: Fs.Path, - referrer: bun.String, - loader: bun.options.Loader, - package_json: ?*const PackageJSON, - ) *anyopaque { - var job: *TranspilerJob = this.store.get(); - const owned_path = Fs.Path.init(bun.default_allocator.dupe(u8, path.text) catch unreachable); - const promise = JSC.JSInternalPromise.create(globalObject); - - // NOTE: DirInfo should already be cached since module loading happens - // after module resolution, so this should be cheap - var resolved_source = ResolvedSource{}; - if (package_json) |pkg| { - switch (pkg.module_type) { - .cjs => { - resolved_source.tag = .package_json_type_commonjs; - resolved_source.is_commonjs_module = true; - }, - .esm => resolved_source.tag = .package_json_type_module, - .unknown => {}, - } - } - - job.* = TranspilerJob{ - .non_threadsafe_input_specifier = input_specifier, - .path = owned_path, - .globalThis = globalObject, - .non_threadsafe_referrer = referrer, - .vm = vm, - .log = logger.Log.init(bun.default_allocator), - .loader = loader, - .promise = JSC.Strong.create(JSValue.fromCell(promise), globalObject), - .poll_ref = .{}, - .fetcher = TranspilerJob.Fetcher{ - .file = {}, - }, - .resolved_source = resolved_source, - }; - if (comptime Environment.allow_assert) - debug("transpile({s}, {s}, async)", .{ path.text, @tagName(job.loader) }); - job.schedule(); - return promise; - } - - pub const TranspilerJob = struct { - path: Fs.Path, - non_threadsafe_input_specifier: String, - non_threadsafe_referrer: String, - loader: options.Loader, - promise: JSC.Strong = .empty, - vm: *VirtualMachine, - globalThis: *JSGlobalObject, - fetcher: Fetcher, - poll_ref: Async.KeepAlive = .{}, - generation_number: u32 = 0, - log: logger.Log, - parse_error: ?anyerror = null, - resolved_source: ResolvedSource = ResolvedSource{}, - work_task: JSC.WorkPoolTask = .{ .callback = runFromWorkerThread }, - next: ?*TranspilerJob = null, - - pub const Store = bun.HiveArray(TranspilerJob, if (bun.heap_breakdown.enabled) 0 else 64).Fallback; - - pub const Fetcher = union(enum) { - virtual_module: bun.String, - file: void, - - pub fn deinit(this: *@This()) void { - if (this.* == .virtual_module) { - this.virtual_module.deref(); - } - } - }; - - pub fn deinit(this: *TranspilerJob) void { - bun.default_allocator.free(this.path.text); - - this.poll_ref.disable(); - this.fetcher.deinit(); - this.loader = options.Loader.file; - this.non_threadsafe_input_specifier.deref(); - this.non_threadsafe_referrer.deref(); - this.path = Fs.Path.empty; - this.log.deinit(); - this.promise.deinit(); - this.globalThis = undefined; - } - - threadlocal var ast_memory_store: ?*js_ast.ASTMemoryAllocator = null; - threadlocal var source_code_printer: ?*js_printer.BufferPrinter = null; - - pub fn dispatchToMainThread(this: *TranspilerJob) void { - this.vm.transpiler_store.queue.push(this); - this.vm.eventLoop().enqueueTaskConcurrent(JSC.ConcurrentTask.createFrom(&this.vm.transpiler_store)); - } - - pub fn runFromJSThread(this: *TranspilerJob) void { - var vm = this.vm; - const promise = this.promise.swap(); - const globalThis = this.globalThis; - this.poll_ref.unref(vm); - - const referrer = this.non_threadsafe_referrer; - this.non_threadsafe_referrer = String.empty; - var log = this.log; - this.log = logger.Log.init(bun.default_allocator); - var resolved_source = this.resolved_source; - const specifier = brk: { - if (this.parse_error != null) { - 
break :brk bun.String.createUTF8(this.path.text); - } - - const out = this.non_threadsafe_input_specifier; - this.non_threadsafe_input_specifier = String.empty; - - bun.debugAssert(resolved_source.source_url.isEmpty()); - bun.debugAssert(resolved_source.specifier.isEmpty()); - resolved_source.source_url = out.createIfDifferent(this.path.text); - resolved_source.specifier = out.dupeRef(); - break :brk out; - }; - - const parse_error = this.parse_error; - this.promise.deinit(); - this.deinit(); - - _ = vm.transpiler_store.store.put(this); - - ModuleLoader.AsyncModule.fulfill(globalThis, promise, resolved_source, parse_error, specifier, referrer, &log); - } - - pub fn schedule(this: *TranspilerJob) void { - this.poll_ref.ref(this.vm); - JSC.WorkPool.schedule(&this.work_task); - } - - pub fn runFromWorkerThread(work_task: *JSC.WorkPoolTask) void { - @as(*TranspilerJob, @fieldParentPtr("work_task", work_task)).run(); - } - - pub fn run(this: *TranspilerJob) void { - var arena = bun.ArenaAllocator.init(bun.default_allocator); - defer arena.deinit(); - const allocator = arena.allocator(); - - defer this.dispatchToMainThread(); - if (this.generation_number != this.vm.transpiler_store.generation_number.load(.monotonic)) { - this.parse_error = error.TranspilerJobGenerationMismatch; - return; - } - - if (ast_memory_store == null) { - ast_memory_store = bun.default_allocator.create(js_ast.ASTMemoryAllocator) catch bun.outOfMemory(); - ast_memory_store.?.* = js_ast.ASTMemoryAllocator{ - .allocator = allocator, - .previous = null, - }; - } - - ast_memory_store.?.allocator = allocator; - ast_memory_store.?.reset(); - ast_memory_store.?.push(); - - const path = this.path; - const specifier = this.path.text; - const loader = this.loader; - this.log = logger.Log.init(bun.default_allocator); - - var cache = JSC.RuntimeTranspilerCache{ - .output_code_allocator = allocator, - .sourcemap_allocator = bun.default_allocator, - }; - - var vm = this.vm; - var transpiler: bun.Transpiler = undefined; - transpiler = vm.transpiler; - transpiler.setAllocator(allocator); - transpiler.setLog(&this.log); - transpiler.resolver.opts = transpiler.options; - transpiler.macro_context = null; - transpiler.linker.resolver = &transpiler.resolver; - - var fd: ?StoredFileDescriptorType = null; - var package_json: ?*PackageJSON = null; - const hash = bun.Watcher.getHash(path.text); - - switch (vm.bun_watcher) { - .hot, .watch => { - if (vm.bun_watcher.indexOf(hash)) |index| { - const watcher_fd = vm.bun_watcher.watchlist().items(.fd)[index]; - fd = if (watcher_fd.stdioTag() == null) watcher_fd else null; - package_json = vm.bun_watcher.watchlist().items(.package_json)[index]; - } - }, - else => {}, - } - - // this should be a cheap lookup because 24 bytes == 8 * 3 so it's read 3 machine words - const is_node_override = strings.hasPrefixComptime(specifier, NodeFallbackModules.import_path); - - const macro_remappings = if (vm.macro_mode or !vm.has_any_macro_remappings or is_node_override) - MacroRemap{} - else - transpiler.options.macro_remap; - - var fallback_source: logger.Source = undefined; - - // Usually, we want to close the input file automatically. 
- // - // If we're re-using the file descriptor from the fs watcher - // Do not close it because that will break the kqueue-based watcher - // - var should_close_input_file_fd = fd == null; - - var input_file_fd: StoredFileDescriptorType = .invalid; - - const is_main = vm.main.len == path.text.len and - vm.main_hash == hash and - strings.eqlLong(vm.main, path.text, false); - - const module_type: ModuleType = switch (this.resolved_source.tag) { - .package_json_type_commonjs => .cjs, - .package_json_type_module => .esm, - else => .unknown, - }; - - var parse_options = Transpiler.ParseOptions{ - .allocator = allocator, - .path = path, - .loader = loader, - .dirname_fd = .invalid, - .file_descriptor = fd, - .file_fd_ptr = &input_file_fd, - .file_hash = hash, - .macro_remappings = macro_remappings, - .jsx = transpiler.options.jsx, - .emit_decorator_metadata = transpiler.options.emit_decorator_metadata, - .virtual_source = null, - .dont_bundle_twice = true, - .allow_commonjs = true, - .inject_jest_globals = transpiler.options.rewrite_jest_for_tests, - .set_breakpoint_on_first_line = vm.debugger != null and - vm.debugger.?.set_breakpoint_on_first_line and - is_main and - setBreakPointOnFirstLine(), - .runtime_transpiler_cache = if (!JSC.RuntimeTranspilerCache.is_disabled) &cache else null, - .remove_cjs_module_wrapper = is_main and vm.module_loader.eval_source != null, - .module_type = module_type, - .allow_bytecode_cache = true, - }; - - defer { - if (should_close_input_file_fd and input_file_fd.isValid()) { - input_file_fd.close(); - input_file_fd = .invalid; - } - } - - if (is_node_override) { - if (NodeFallbackModules.contentsFromPath(specifier)) |code| { - const fallback_path = Fs.Path.initWithNamespace(specifier, "node"); - fallback_source = logger.Source{ .path = fallback_path, .contents = code }; - parse_options.virtual_source = &fallback_source; - } - } - - var parse_result: bun.transpiler.ParseResult = transpiler.parseMaybeReturnFileOnlyAllowSharedBuffer( - parse_options, - null, - false, - false, - ) orelse { - if (vm.isWatcherEnabled()) { - if (input_file_fd.isValid()) { - if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { - should_close_input_file_fd = false; - _ = vm.bun_watcher.addFile( - input_file_fd, - path.text, - hash, - loader, - .invalid, - package_json, - true, - ); - } - } - } - - this.parse_error = error.ParseError; - return; - }; - - if (vm.isWatcherEnabled()) { - if (input_file_fd.isValid()) { - if (!is_node_override and - std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) - { - should_close_input_file_fd = false; - _ = vm.bun_watcher.addFile( - input_file_fd, - path.text, - hash, - loader, - .invalid, - package_json, - true, - ); - } - } - } - - if (cache.entry) |*entry| { - vm.source_mappings.putMappings(parse_result.source, .{ - .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len }, - .allocator = bun.default_allocator, - }) catch {}; - - if (comptime Environment.dump_source) { - dumpSourceString(vm, specifier, entry.output_code.byteSlice()); - } - - this.resolved_source = ResolvedSource{ - .allocator = null, - .source_code = switch (entry.output_code) { - .string => entry.output_code.string, - .utf8 => brk: { - const result = bun.String.createUTF8(entry.output_code.utf8); - cache.output_code_allocator.free(entry.output_code.utf8); - entry.output_code.utf8 = ""; - break :brk result; - }, - }, - .is_commonjs_module = entry.metadata.module_type == 
.cjs, - .tag = this.resolved_source.tag, - }; - - return; - } - - if (parse_result.already_bundled != .none) { - const bytecode_slice = parse_result.already_bundled.bytecodeSlice(); - this.resolved_source = ResolvedSource{ - .allocator = null, - .source_code = bun.String.createLatin1(parse_result.source.contents), - .already_bundled = true, - .bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null, - .bytecode_cache_size = bytecode_slice.len, - .is_commonjs_module = parse_result.already_bundled.isCommonJS(), - .tag = this.resolved_source.tag, - }; - this.resolved_source.source_code.ensureHash(); - return; - } - - for (parse_result.ast.import_records.slice()) |*import_record_| { - var import_record: *bun.ImportRecord = import_record_; - - if (JSC.HardcodedModule.Alias.get(import_record.path.text, transpiler.options.target)) |replacement| { - import_record.path.text = replacement.path; - import_record.tag = replacement.tag; - import_record.is_external_without_side_effects = true; - continue; - } - - if (transpiler.options.rewrite_jest_for_tests) { - if (strings.eqlComptime( - import_record.path.text, - "@jest/globals", - ) or strings.eqlComptime( - import_record.path.text, - "vitest", - )) { - import_record.path.namespace = "bun"; - import_record.tag = .bun_test; - import_record.path.text = "test"; - import_record.is_external_without_side_effects = true; - continue; - } - } - - if (strings.hasPrefixComptime(import_record.path.text, "bun:")) { - import_record.path = Fs.Path.init(import_record.path.text["bun:".len..]); - import_record.path.namespace = "bun"; - import_record.is_external_without_side_effects = true; - - if (strings.eqlComptime(import_record.path.text, "test")) { - import_record.tag = .bun_test; - } - } - } - - if (source_code_printer == null) { - const writer = js_printer.BufferWriter.init(bun.default_allocator); - source_code_printer = bun.default_allocator.create(js_printer.BufferPrinter) catch unreachable; - source_code_printer.?.* = js_printer.BufferPrinter.init(writer); - source_code_printer.?.ctx.append_null_byte = false; - } - - var printer = source_code_printer.?.*; - printer.ctx.reset(); - - { - var mapper = vm.sourceMapHandler(&printer); - defer source_code_printer.?.* = printer; - _ = transpiler.printWithSourceMap( - parse_result, - @TypeOf(&printer), - &printer, - .esm_ascii, - mapper.get(), - ) catch |err| { - this.parse_error = err; - return; - }; - } - - if (comptime Environment.dump_source) { - dumpSource(this.vm, specifier, &printer); - } - - const source_code = brk: { - const written = printer.ctx.getWritten(); - - const result = cache.output_code orelse bun.String.createLatin1(written); - - if (written.len > 1024 * 1024 * 2 or vm.smol) { - printer.ctx.buffer.deinit(); - source_code_printer.?.* = printer; - } - - // In a benchmarking loading @babel/standalone 100 times: - // - // After ensureHash: - // 354.00 ms 4.2% 354.00 ms WTF::StringImpl::hashSlowCase() const - // - // Before ensureHash: - // 506.00 ms 6.1% 506.00 ms WTF::StringImpl::hashSlowCase() const - // - result.ensureHash(); - - break :brk result; - }; - this.resolved_source = ResolvedSource{ - .allocator = null, - .source_code = source_code, - .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, - .tag = this.resolved_source.tag, - }; - } - }; -}; - -pub const ModuleLoader = struct { - transpile_source_code_arena: ?*bun.ArenaAllocator = null, - eval_source: ?*logger.Source = null, - - pub var 
is_allowed_to_use_internal_testing_apis = false; - - /// This must be called after calling transpileSourceCode - pub fn resetArena(this: *ModuleLoader, jsc_vm: *VirtualMachine) void { - bun.assert(&jsc_vm.module_loader == this); - if (this.transpile_source_code_arena) |arena| { - if (jsc_vm.smol) { - _ = arena.reset(.free_all); - } else { - _ = arena.reset(.{ .retain_with_limit = 8 * 1024 * 1024 }); - } - } - } - - pub fn resolveEmbeddedFile(vm: *VirtualMachine, input_path: []const u8, extname: []const u8) ?[]const u8 { - if (input_path.len == 0) return null; - var graph = vm.standalone_module_graph orelse return null; - const file = graph.find(input_path) orelse return null; - - if (comptime Environment.isLinux) { - // TODO: use /proc/fd/12346 instead! Avoid the copy! - } - - // atomically write to a tmpfile and then move it to the final destination - var tmpname_buf: bun.PathBuffer = undefined; - const tmpfilename = bun.sliceTo(bun.fs.FileSystem.instance.tmpname(extname, &tmpname_buf, bun.hash(file.name)) catch return null, 0); - - const tmpdir: bun.FD = .fromStdDir(bun.fs.FileSystem.instance.tmpdir() catch return null); - - // First we open the tmpfile, to avoid any other work in the event of failure. - const tmpfile = bun.Tmpfile.create(tmpdir, tmpfilename).unwrap() catch return null; - defer tmpfile.fd.close(); - - switch (JSC.Node.NodeFS.writeFileWithPathBuffer( - &tmpname_buf, // not used - - .{ - .data = .{ - .encoded_slice = ZigString.Slice.fromUTF8NeverFree(file.contents), - }, - .dirfd = tmpdir, - .file = .{ .fd = tmpfile.fd }, - .encoding = .buffer, - }, - )) { - .err => { - return null; - }, - else => {}, - } - return bun.path.joinAbs(bun.fs.FileSystem.instance.fs.tmpdirPath(), .auto, tmpfilename); - } - - pub const AsyncModule = struct { - - // This is all the state used by the printer to print the module - parse_result: ParseResult, - promise: JSC.Strong = .empty, - path: Fs.Path, - specifier: string = "", - referrer: string = "", - string_buf: []u8 = &[_]u8{}, - fd: ?StoredFileDescriptorType = null, - package_json: ?*PackageJSON = null, - loader: Api.Loader, - hash: u32 = std.math.maxInt(u32), - globalThis: *JSGlobalObject = undefined, - arena: *bun.ArenaAllocator, - - // This is the specific state for making it async - poll_ref: Async.KeepAlive = .{}, - any_task: JSC.AnyTask = undefined, - - pub const Id = u32; - - const PackageDownloadError = struct { - name: []const u8, - resolution: Install.Resolution, - err: anyerror, - url: []const u8, - }; - - const PackageResolveError = struct { - name: []const u8, - err: anyerror, - url: []const u8, - version: Dependency.Version, - }; - - pub const Queue = struct { - map: Map = .{}, - scheduled: u32 = 0, - concurrent_task_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(0), - - const DeferredDependencyError = struct { - dependency: Dependency, - root_dependency_id: Install.DependencyID, - err: anyerror, - }; - - pub const Map = std.ArrayListUnmanaged(AsyncModule); - - pub fn enqueue(this: *Queue, globalObject: *JSGlobalObject, opts: anytype) void { - debug("enqueue: {s}", .{opts.specifier}); - var module = AsyncModule.init(opts, globalObject) catch unreachable; - module.poll_ref.ref(this.vm()); - - this.map.append(this.vm().allocator, module) catch unreachable; - this.vm().packageManager().drainDependencyList(); - } - - pub fn onDependencyError(ctx: *anyopaque, dependency: Dependency, root_dependency_id: Install.DependencyID, err: anyerror) void { - var this = bun.cast(*Queue, ctx); - debug("onDependencyError: {s}", 
.{this.vm().packageManager().lockfile.str(&dependency.name)}); - - var modules: []AsyncModule = this.map.items; - var i: usize = 0; - outer: for (modules) |module_| { - var module = module_; - const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); - for (root_dependency_ids, 0..) |dep, dep_i| { - if (dep != root_dependency_id) continue; - module.resolveError( - this.vm(), - module.parse_result.pending_imports.items(.import_record_id)[dep_i], - .{ - .name = this.vm().packageManager().lockfile.str(&dependency.name), - .err = err, - .url = "", - .version = dependency.version, - }, - ) catch unreachable; - continue :outer; - } - - modules[i] = module; - i += 1; - } - this.map.items.len = i; - } - pub fn onWakeHandler(ctx: *anyopaque, _: *PackageManager) void { - debug("onWake", .{}); - var this = bun.cast(*Queue, ctx); - this.vm().enqueueTaskConcurrent(JSC.ConcurrentTask.createFrom(this)); - } - - pub fn onPoll(this: *Queue) void { - debug("onPoll", .{}); - this.runTasks(); - this.pollModules(); - } - - pub fn runTasks(this: *Queue) void { - var pm = this.vm().packageManager(); - - if (Output.enable_ansi_colors_stderr) { - pm.startProgressBarIfNone(); - pm.runTasks( - *Queue, - this, - .{ - .onExtract = {}, - .onResolve = onResolve, - .onPackageManifestError = onPackageManifestError, - .onPackageDownloadError = onPackageDownloadError, - .progress_bar = true, - }, - true, - PackageManager.Options.LogLevel.default, - ) catch unreachable; - } else { - pm.runTasks( - *Queue, - this, - .{ - .onExtract = {}, - .onResolve = onResolve, - .onPackageManifestError = onPackageManifestError, - .onPackageDownloadError = onPackageDownloadError, - }, - true, - PackageManager.Options.LogLevel.default_no_progress, - ) catch unreachable; - } - } - - pub fn onResolve(_: *Queue) void { - debug("onResolve", .{}); - } - - pub fn onPackageManifestError( - this: *Queue, - name: []const u8, - err: anyerror, - url: []const u8, - ) void { - debug("onPackageManifestError: {s}", .{name}); - - var modules: []AsyncModule = this.map.items; - var i: usize = 0; - outer: for (modules) |module_| { - var module = module_; - const tags = module.parse_result.pending_imports.items(.tag); - for (tags, 0..) 
|tag, tag_i| { - if (tag == .resolve) { - const esms = module.parse_result.pending_imports.items(.esm); - const esm = esms[tag_i]; - const string_bufs = module.parse_result.pending_imports.items(.string_buf); - - if (!strings.eql(esm.name.slice(string_bufs[tag_i]), name)) continue; - - const versions = module.parse_result.pending_imports.items(.dependency); - - module.resolveError( - this.vm(), - module.parse_result.pending_imports.items(.import_record_id)[tag_i], - .{ - .name = name, - .err = err, - .url = url, - .version = versions[tag_i], - }, - ) catch unreachable; - continue :outer; - } - } - - modules[i] = module; - i += 1; - } - this.map.items.len = i; - } - - pub fn onPackageDownloadError( - this: *Queue, - package_id: Install.PackageID, - name: []const u8, - resolution: *const Install.Resolution, - err: anyerror, - url: []const u8, - ) void { - debug("onPackageDownloadError: {s}", .{name}); - - const resolution_ids = this.vm().packageManager().lockfile.buffers.resolutions.items; - var modules: []AsyncModule = this.map.items; - var i: usize = 0; - outer: for (modules) |module_| { - var module = module_; - const record_ids = module.parse_result.pending_imports.items(.import_record_id); - const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); - for (root_dependency_ids, 0..) |dependency_id, import_id| { - if (resolution_ids[dependency_id] != package_id) continue; - module.downloadError( - this.vm(), - record_ids[import_id], - .{ - .name = name, - .resolution = resolution.*, - .err = err, - .url = url, - }, - ) catch unreachable; - continue :outer; - } - - modules[i] = module; - i += 1; - } - this.map.items.len = i; - } - - pub fn pollModules(this: *Queue) void { - var pm = this.vm().packageManager(); - if (pm.pending_tasks.load(.monotonic) > 0) return; - - var modules: []AsyncModule = this.map.items; - var i: usize = 0; - - for (modules) |mod| { - var module = mod; - var tags = module.parse_result.pending_imports.items(.tag); - const root_dependency_ids = module.parse_result.pending_imports.items(.root_dependency_id); - // var esms = module.parse_result.pending_imports.items(.esm); - // var versions = module.parse_result.pending_imports.items(.dependency); - var done_count: usize = 0; - for (tags, 0..) |tag, tag_i| { - const root_id = root_dependency_ids[tag_i]; - const resolution_ids = pm.lockfile.buffers.resolutions.items; - if (root_id >= resolution_ids.len) continue; - const package_id = resolution_ids[root_id]; - - switch (tag) { - .resolve => { - if (package_id == Install.invalid_package_id) { - continue; - } - - // if we get here, the package has already been resolved. - tags[tag_i] = .download; - }, - .download => { - if (package_id == Install.invalid_package_id) { - unreachable; - } - }, - .done => { - done_count += 1; - continue; - }, - } - - if (package_id == Install.invalid_package_id) { - continue; - } - - const package = pm.lockfile.packages.get(package_id); - bun.assert(package.resolution.tag != .root); - - var name_and_version_hash: ?u64 = null; - var patchfile_hash: ?u64 = null; - switch (pm.determinePreinstallState(package, pm.lockfile, &name_and_version_hash, &patchfile_hash)) { - .done => { - // we are only truly done if all the dependencies are done. - const current_tasks = pm.total_tasks; - // so if enqueuing all the dependencies produces no new tasks, we are done. 
- pm.enqueueDependencyList(package.dependencies); - if (current_tasks == pm.total_tasks) { - tags[tag_i] = .done; - done_count += 1; - } - }, - .extracting => { - // we are extracting the package - // we need to wait for the next poll - continue; - }, - .extract => {}, - else => {}, - } - } - - if (done_count == tags.len) { - module.done(this.vm()); - } else { - modules[i] = module; - i += 1; - } - } - this.map.items.len = i; - if (i == 0) { - // ensure we always end the progress bar - this.vm().packageManager().endProgressBar(); - } - } - - pub fn vm(this: *Queue) *VirtualMachine { - return @alignCast(@fieldParentPtr("modules", this)); - } - }; - - pub fn init(opts: anytype, globalObject: *JSGlobalObject) !AsyncModule { - // var stmt_blocks = js_ast.Stmt.Data.toOwnedSlice(); - // var expr_blocks = js_ast.Expr.Data.toOwnedSlice(); - const this_promise = JSValue.createInternalPromise(globalObject); - const promise = JSC.Strong.create(this_promise, globalObject); - - var buf = bun.StringBuilder{}; - buf.count(opts.referrer); - buf.count(opts.specifier); - buf.count(opts.path.text); - - try buf.allocate(bun.default_allocator); - opts.promise_ptr.?.* = this_promise.asInternalPromise().?; - const referrer = buf.append(opts.referrer); - const specifier = buf.append(opts.specifier); - const path = Fs.Path.init(buf.append(opts.path.text)); - - return AsyncModule{ - .parse_result = opts.parse_result, - .promise = promise, - .path = path, - .specifier = specifier, - .referrer = referrer, - .fd = opts.fd, - .package_json = opts.package_json, - .loader = opts.loader.toAPI(), - .string_buf = buf.allocatedSlice(), - // .stmt_blocks = stmt_blocks, - // .expr_blocks = expr_blocks, - .globalThis = globalObject, - .arena = opts.arena, - }; - } - - pub fn done(this: *AsyncModule, jsc_vm: *VirtualMachine) void { - var clone = jsc_vm.allocator.create(AsyncModule) catch unreachable; - clone.* = this.*; - jsc_vm.modules.scheduled += 1; - clone.any_task = JSC.AnyTask.New(AsyncModule, onDone).init(clone); - jsc_vm.enqueueTask(JSC.Task.init(&clone.any_task)); - } - - pub fn onDone(this: *AsyncModule) void { - JSC.markBinding(@src()); - var jsc_vm = this.globalThis.bunVM(); - jsc_vm.modules.scheduled -= 1; - if (jsc_vm.modules.scheduled == 0) { - jsc_vm.packageManager().endProgressBar(); - } - var log = logger.Log.init(jsc_vm.allocator); - defer log.deinit(); - var errorable: JSC.ErrorableResolvedSource = undefined; - this.poll_ref.unref(jsc_vm); - outer: { - errorable = JSC.ErrorableResolvedSource.ok(this.resumeLoadingModule(&log) catch |err| { - VirtualMachine.processFetchLog( - this.globalThis, - bun.String.init(this.specifier), - bun.String.init(this.referrer), - &log, - &errorable, - err, - ); - break :outer; - }); - } - - var spec = bun.String.init(ZigString.init(this.specifier).withEncoding()); - var ref = bun.String.init(ZigString.init(this.referrer).withEncoding()); - Bun__onFulfillAsyncModule( - this.globalThis, - this.promise.get().?, - &errorable, - &spec, - &ref, - ); - this.deinit(); - jsc_vm.allocator.destroy(this); - } - - pub fn fulfill( - globalThis: *JSGlobalObject, - promise: JSValue, - resolved_source: ResolvedSource, - err: ?anyerror, - specifier_: bun.String, - referrer_: bun.String, - log: *logger.Log, - ) void { - JSC.markBinding(@src()); - var specifier = specifier_; - var referrer = referrer_; - defer { - specifier.deref(); - referrer.deref(); - } - - var errorable: JSC.ErrorableResolvedSource = undefined; - if (err) |e| { - VirtualMachine.processFetchLog( - globalThis, - specifier, - 
referrer, - log, - &errorable, - e, - ); - } else { - errorable = JSC.ErrorableResolvedSource.ok(resolved_source); - } - log.deinit(); - - debug("fulfill: {any}", .{specifier}); - - Bun__onFulfillAsyncModule( - globalThis, - promise, - &errorable, - &specifier, - &referrer, - ); - } - - pub fn resolveError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageResolveError) !void { - const globalThis = this.globalThis; - - const msg: []u8 = try switch (result.err) { - error.PackageManifestHTTP400 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 400 while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP401 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 401 while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP402 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 402 while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP403 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 403 while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP404 => std.fmt.allocPrint( - bun.default_allocator, - "Package '{s}' was not found", - .{result.name}, - ), - error.PackageManifestHTTP4xx => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 4xx while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.PackageManifestHTTP5xx => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 5xx while resolving package '{s}' at '{s}'", - .{ result.name, result.url }, - ), - error.DistTagNotFound, error.NoMatchingVersion => brk: { - const prefix: []const u8 = if (result.err == error.NoMatchingVersion and result.version.tag == .npm and result.version.value.npm.version.isExact()) - "Version not found" - else if (result.version.tag == .npm and !result.version.value.npm.version.isExact()) - "No matching version found" - else - "No match found"; - - break :brk std.fmt.allocPrint( - bun.default_allocator, - "{s} '{s}' for package '{s}' (but package exists)", - .{ prefix, vm.packageManager().lockfile.str(&result.version.literal), result.name }, - ); - }, - else => |err| std.fmt.allocPrint( - bun.default_allocator, - "{s} resolving package '{s}' at '{s}'", - .{ bun.asByteSlice(@errorName(err)), result.name, result.url }, - ), - }; - - const name: []const u8 = switch (result.err) { - error.NoMatchingVersion => "PackageVersionNotFound", - error.DistTagNotFound => "PackageTagNotFound", - error.PackageManifestHTTP403 => "PackageForbidden", - error.PackageManifestHTTP404 => "PackageNotFound", - else => "PackageResolveError", - }; - - var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis); - if (result.url.len > 0) - error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); - const location = logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?; - error_instance.put(globalThis, ZigString.static("sourceURL"), 
ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line)); - if (location.line_text) |line_text| { - error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis)); - } - error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column)); - if (this.referrer.len > 0 and !strings.eqlComptime(this.referrer, "undefined")) { - error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.referrer).withEncoding().toJS(globalThis)); - } - - const promise_value = this.promise.swap(); - var promise = promise_value.asInternalPromise().?; - promise_value.ensureStillAlive(); - this.poll_ref.unref(vm); - this.deinit(); - promise.rejectAsHandled(globalThis, error_instance); - } - pub fn downloadError(this: *AsyncModule, vm: *VirtualMachine, import_record_id: u32, result: PackageDownloadError) !void { - const globalThis = this.globalThis; - - const msg_args = .{ - result.name, - result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), - }; - - const msg: []u8 = try switch (result.err) { - error.TarballHTTP400 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 400 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP401 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 401 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP402 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 402 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP403 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 403 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP404 => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 404 downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP4xx => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 4xx downloading package '{s}@{any}'", - msg_args, - ), - error.TarballHTTP5xx => std.fmt.allocPrint( - bun.default_allocator, - "HTTP 5xx downloading package '{s}@{any}'", - msg_args, - ), - error.TarballFailedToExtract => std.fmt.allocPrint( - bun.default_allocator, - "Failed to extract tarball for package '{s}@{any}'", - msg_args, - ), - else => |err| std.fmt.allocPrint( - bun.default_allocator, - "{s} downloading package '{s}@{any}'", - .{ - bun.asByteSlice(@errorName(err)), - result.name, - result.resolution.fmt(vm.packageManager().lockfile.buffers.string_bytes.items, .any), - }, - ), - }; - - const name: []const u8 = switch (result.err) { - error.TarballFailedToExtract => "PackageExtractionError", - error.TarballHTTP403 => "TarballForbiddenError", - error.TarballHTTP404 => "TarballNotFoundError", - else => "TarballDownloadError", - }; - - var error_instance = ZigString.init(msg).withEncoding().toErrorInstance(globalThis); - if (result.url.len > 0) - error_instance.put(globalThis, ZigString.static("url"), ZigString.init(result.url).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("name"), ZigString.init(name).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("pkg"), ZigString.init(result.name).withEncoding().toJS(globalThis)); - if (this.specifier.len > 0 and !strings.eqlComptime(this.specifier, "undefined")) { - error_instance.put(globalThis, ZigString.static("referrer"), ZigString.init(this.specifier).withEncoding().toJS(globalThis)); - } - - const location = 
logger.rangeData(&this.parse_result.source, this.parse_result.ast.import_records.at(import_record_id).range, "").location.?; - error_instance.put(globalThis, ZigString.static("specifier"), ZigString.init( - this.parse_result.ast.import_records.at(import_record_id).path.text, - ).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("sourceURL"), ZigString.init(this.parse_result.source.path.text).withEncoding().toJS(globalThis)); - error_instance.put(globalThis, ZigString.static("line"), JSValue.jsNumber(location.line)); - if (location.line_text) |line_text| { - error_instance.put(globalThis, ZigString.static("lineText"), ZigString.init(line_text).withEncoding().toJS(globalThis)); - } - error_instance.put(globalThis, ZigString.static("column"), JSValue.jsNumber(location.column)); - - const promise_value = this.promise.swap(); - var promise = promise_value.asInternalPromise().?; - promise_value.ensureStillAlive(); - this.poll_ref.unref(vm); - this.deinit(); - promise.rejectAsHandled(globalThis, error_instance); - } - - pub fn resumeLoadingModule(this: *AsyncModule, log: *logger.Log) !ResolvedSource { - debug("resumeLoadingModule: {s}", .{this.specifier}); - var parse_result = this.parse_result; - const path = this.path; - var jsc_vm = VirtualMachine.get(); - const specifier = this.specifier; - const old_log = jsc_vm.log; - - jsc_vm.transpiler.linker.log = log; - jsc_vm.transpiler.log = log; - jsc_vm.transpiler.resolver.log = log; - jsc_vm.packageManager().log = log; - defer { - jsc_vm.transpiler.linker.log = old_log; - jsc_vm.transpiler.log = old_log; - jsc_vm.transpiler.resolver.log = old_log; - jsc_vm.packageManager().log = old_log; - } - - // We _must_ link because: - // - node_modules bundle won't be properly - try jsc_vm.transpiler.linker.link( - path, - &parse_result, - jsc_vm.origin, - .absolute_path, - false, - true, - ); - this.parse_result = parse_result; - - var printer = VirtualMachine.source_code_printer.?.*; - printer.ctx.reset(); - - { - var mapper = jsc_vm.sourceMapHandler(&printer); - defer VirtualMachine.source_code_printer.?.* = printer; - _ = try jsc_vm.transpiler.printWithSourceMap( - parse_result, - @TypeOf(&printer), - &printer, - .esm_ascii, - mapper.get(), - ); - } - - if (comptime Environment.dump_source) { - dumpSource(jsc_vm, specifier, &printer); - } - - if (jsc_vm.isWatcherEnabled()) { - var resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, bun.String.init(specifier), path.text, null, false); - - if (parse_result.input_fd) |fd_| { - if (std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { - _ = jsc_vm.bun_watcher.addFile( - fd_, - path.text, - this.hash, - options.Loader.fromAPI(this.loader), - .invalid, - this.package_json, - true, - ); - } - } - - resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs; - - return resolved_source; - } - - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.createLatin1(printer.ctx.getWritten()), - .specifier = String.init(specifier), - .source_url = String.init(path.text), - .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, - }; - } - - pub fn deinit(this: *AsyncModule) void { - this.promise.deinit(); - this.parse_result.deinit(); - this.arena.deinit(); - this.globalThis.bunVM().allocator.destroy(this.arena); - // bun.default_allocator.free(this.stmt_blocks); - // 
bun.default_allocator.free(this.expr_blocks); - - bun.default_allocator.free(this.string_buf); - } - - extern "c" fn Bun__onFulfillAsyncModule( - globalObject: *JSGlobalObject, - promiseValue: JSValue, - res: *JSC.ErrorableResolvedSource, - specifier: *bun.String, - referrer: *bun.String, - ) void; - }; - - pub export fn Bun__getDefaultLoader(global: *JSGlobalObject, str: *const bun.String) Api.Loader { - var jsc_vm = global.bunVM(); - const filename = str.toUTF8(jsc_vm.allocator); - defer filename.deinit(); - const loader = jsc_vm.transpiler.options.loader(Fs.PathName.init(filename.slice()).ext).toAPI(); - if (loader == .file) { - return Api.Loader.js; - } - - return loader; - } - - pub fn transpileSourceCode( - jsc_vm: *VirtualMachine, - specifier: string, - referrer: string, - input_specifier: String, - path: Fs.Path, - loader: options.Loader, - module_type: options.ModuleType, - log: *logger.Log, - virtual_source: ?*const logger.Source, - promise_ptr: ?*?*JSC.JSInternalPromise, - source_code_printer: *js_printer.BufferPrinter, - globalObject: ?*JSGlobalObject, - comptime flags: FetchFlags, - ) !ResolvedSource { - const disable_transpilying = comptime flags.disableTranspiling(); - - if (comptime disable_transpilying) { - if (!(loader.isJavaScriptLike() or loader == .toml or loader == .text or loader == .json or loader == .jsonc)) { - // Don't print "export default " - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.empty, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - }; - } - } - - switch (loader) { - .js, .jsx, .ts, .tsx, .json, .jsonc, .toml, .text => { - jsc_vm.transpiled_count += 1; - jsc_vm.transpiler.resetStore(); - const hash = bun.Watcher.getHash(path.text); - const is_main = jsc_vm.main.len == path.text.len and - jsc_vm.main_hash == hash and - strings.eqlLong(jsc_vm.main, path.text, false); - - var arena_: ?*bun.ArenaAllocator = brk: { - // Attempt to reuse the Arena from the parser when we can - // This code is potentially re-entrant, so only one Arena can be reused at a time - // That's why we have to check if the Arena is null - // - // Using an Arena here is a significant memory optimization when loading many files - if (jsc_vm.module_loader.transpile_source_code_arena) |shared| { - jsc_vm.module_loader.transpile_source_code_arena = null; - break :brk shared; - } - - // we must allocate the arena so that the pointer it points to is always valid. 
- const arena = try jsc_vm.allocator.create(bun.ArenaAllocator); - arena.* = bun.ArenaAllocator.init(bun.default_allocator); - break :brk arena; - }; - - var give_back_arena = true; - defer { - if (give_back_arena) { - if (jsc_vm.module_loader.transpile_source_code_arena == null) { - // when .print_source is used - // caller is responsible for freeing the arena - if (flags != .print_source) { - if (jsc_vm.smol) { - _ = arena_.?.reset(.free_all); - } else { - _ = arena_.?.reset(.{ .retain_with_limit = 8 * 1024 * 1024 }); - } - } - - jsc_vm.module_loader.transpile_source_code_arena = arena_; - } else { - arena_.?.deinit(); - jsc_vm.allocator.destroy(arena_.?); - } - } - } - - var arena = arena_.?; - const allocator = arena.allocator(); - - var fd: ?StoredFileDescriptorType = null; - var package_json: ?*PackageJSON = null; - - if (jsc_vm.bun_watcher.indexOf(hash)) |index| { - fd = jsc_vm.bun_watcher.watchlist().items(.fd)[index].unwrapValid(); - package_json = jsc_vm.bun_watcher.watchlist().items(.package_json)[index]; - } - - var cache = JSC.RuntimeTranspilerCache{ - .output_code_allocator = allocator, - .sourcemap_allocator = bun.default_allocator, - }; - - const old = jsc_vm.transpiler.log; - jsc_vm.transpiler.log = log; - jsc_vm.transpiler.linker.log = log; - jsc_vm.transpiler.resolver.log = log; - if (jsc_vm.transpiler.resolver.package_manager) |pm| { - pm.log = log; - } - - defer { - jsc_vm.transpiler.log = old; - jsc_vm.transpiler.linker.log = old; - jsc_vm.transpiler.resolver.log = old; - if (jsc_vm.transpiler.resolver.package_manager) |pm| { - pm.log = old; - } - } - - // this should be a cheap lookup because 24 bytes == 8 * 3 so it's read 3 machine words - const is_node_override = strings.hasPrefixComptime(specifier, NodeFallbackModules.import_path); - - const macro_remappings = if (jsc_vm.macro_mode or !jsc_vm.has_any_macro_remappings or is_node_override) - MacroRemap{} - else - jsc_vm.transpiler.options.macro_remap; - - var fallback_source: logger.Source = undefined; - - // Usually, we want to close the input file automatically. 
- // - // If we're re-using the file descriptor from the fs watcher - // Do not close it because that will break the kqueue-based watcher - // - var should_close_input_file_fd = fd == null; - - // We don't want cjs wrappers around non-js files - const module_type_only_for_wrappables = switch (loader) { - .js, .jsx, .ts, .tsx => module_type, - else => .unknown, - }; - - var input_file_fd: StoredFileDescriptorType = bun.invalid_fd; - var parse_options = Transpiler.ParseOptions{ - .allocator = allocator, - .path = path, - .loader = loader, - .dirname_fd = bun.invalid_fd, - .file_descriptor = fd, - .file_fd_ptr = &input_file_fd, - .file_hash = hash, - .macro_remappings = macro_remappings, - .jsx = jsc_vm.transpiler.options.jsx, - .emit_decorator_metadata = jsc_vm.transpiler.options.emit_decorator_metadata, - .virtual_source = virtual_source, - .dont_bundle_twice = true, - .allow_commonjs = true, - .module_type = module_type_only_for_wrappables, - .inject_jest_globals = jsc_vm.transpiler.options.rewrite_jest_for_tests, - .keep_json_and_toml_as_one_statement = true, - .allow_bytecode_cache = true, - .set_breakpoint_on_first_line = is_main and - jsc_vm.debugger != null and - jsc_vm.debugger.?.set_breakpoint_on_first_line and - setBreakPointOnFirstLine(), - .runtime_transpiler_cache = if (!disable_transpilying and !JSC.RuntimeTranspilerCache.is_disabled) &cache else null, - .remove_cjs_module_wrapper = is_main and jsc_vm.module_loader.eval_source != null, - }; - defer { - if (should_close_input_file_fd and input_file_fd != bun.invalid_fd) { - input_file_fd.close(); - input_file_fd = bun.invalid_fd; - } - } - - if (is_node_override) { - if (NodeFallbackModules.contentsFromPath(specifier)) |code| { - const fallback_path = Fs.Path.initWithNamespace(specifier, "node"); - fallback_source = logger.Source{ .path = fallback_path, .contents = code }; - parse_options.virtual_source = &fallback_source; - } - } - - var parse_result: ParseResult = switch (disable_transpilying or - (loader == .json)) { - inline else => |return_file_only| brk: { - break :brk jsc_vm.transpiler.parseMaybeReturnFileOnly( - parse_options, - null, - return_file_only, - ) orelse { - if (comptime !disable_transpilying) { - if (jsc_vm.isWatcherEnabled()) { - if (input_file_fd.isValid()) { - if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { - should_close_input_file_fd = false; - _ = jsc_vm.bun_watcher.addFile( - input_file_fd, - path.text, - hash, - loader, - .invalid, - package_json, - true, - ); - } - } - } - } - - give_back_arena = false; - return error.ParseError; - }; - }, - }; - - if (parse_result.loader == .wasm) { - return transpileSourceCode( - jsc_vm, - specifier, - referrer, - input_specifier, - path, - .wasm, - .unknown, // cjs/esm don't make sense for wasm - log, - &parse_result.source, - promise_ptr, - source_code_printer, - globalObject, - flags, - ); - } - - if (comptime !disable_transpilying) { - if (jsc_vm.isWatcherEnabled()) { - if (input_file_fd.isValid()) { - if (!is_node_override and std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { - should_close_input_file_fd = false; - _ = jsc_vm.bun_watcher.addFile( - input_file_fd, - path.text, - hash, - loader, - .invalid, - package_json, - true, - ); - } - } - } - } - - if (jsc_vm.transpiler.log.errors > 0) { - give_back_arena = false; - return error.ParseError; - } - - if (loader == .json) { - return ResolvedSource{ - .allocator = null, - .source_code = 
bun.String.createUTF8(parse_result.source.contents), - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .tag = ResolvedSource.Tag.json_for_object_loader, - }; - } - - if (comptime disable_transpilying) { - return ResolvedSource{ - .allocator = null, - .source_code = switch (comptime flags) { - .print_source_and_clone => bun.String.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable), - .print_source => bun.String.init(parse_result.source.contents), - else => @compileError("unreachable"), - }, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - }; - } - - if (loader == .json or loader == .jsonc or loader == .toml) { - if (parse_result.empty) { - return ResolvedSource{ - .allocator = null, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .jsvalue_for_export = JSValue.createEmptyObject(jsc_vm.global, 0), - .tag = .exports_object, - }; - } - - return ResolvedSource{ - .allocator = null, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .jsvalue_for_export = parse_result.ast.parts.@"[0]"().stmts[0].data.s_expr.value.toJS(allocator, globalObject orelse jsc_vm.global) catch |e| panic("Unexpected JS error: {s}", .{@errorName(e)}), - .tag = .exports_object, - }; - } - - if (parse_result.already_bundled != .none) { - const bytecode_slice = parse_result.already_bundled.bytecodeSlice(); - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.createLatin1(parse_result.source.contents), - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .already_bundled = true, - .bytecode_cache = if (bytecode_slice.len > 0) bytecode_slice.ptr else null, - .bytecode_cache_size = bytecode_slice.len, - .is_commonjs_module = parse_result.already_bundled.isCommonJS(), - }; - } - - if (parse_result.empty) { - const was_cjs = (loader == .js or loader == .ts) and brk: { - const ext = std.fs.path.extension(parse_result.source.path.text); - break :brk strings.eqlComptime(ext, ".cjs") or strings.eqlComptime(ext, ".cts"); - }; - if (was_cjs) { - return .{ - .allocator = null, - .source_code = bun.String.static("(function(){})"), - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .is_commonjs_module = true, - .tag = .javascript, - }; - } - } - - if (cache.entry) |*entry| { - jsc_vm.source_mappings.putMappings(parse_result.source, .{ - .list = .{ .items = @constCast(entry.sourcemap), .capacity = entry.sourcemap.len }, - .allocator = bun.default_allocator, - }) catch {}; - - if (comptime Environment.allow_assert) { - dumpSourceString(jsc_vm, specifier, entry.output_code.byteSlice()); - } - - return ResolvedSource{ - .allocator = null, - .source_code = switch (entry.output_code) { - .string => entry.output_code.string, - .utf8 => brk: { - const result = bun.String.createUTF8(entry.output_code.utf8); - cache.output_code_allocator.free(entry.output_code.utf8); - entry.output_code.utf8 = ""; - break :brk result; - }, - }, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .is_commonjs_module = entry.metadata.module_type == .cjs, - .tag = brk: { - if (entry.metadata.module_type == .cjs and parse_result.source.path.isFile()) { - const actual_package_json: *PackageJSON = package_json orelse brk2: { - // this should already be cached virtually always so it's fine to do this - const 
dir_info = (jsc_vm.transpiler.resolver.readDirInfo(parse_result.source.path.name.dir) catch null) orelse - break :brk .javascript; - - break :brk2 dir_info.package_json orelse dir_info.enclosing_package_json; - } orelse break :brk .javascript; - - if (actual_package_json.module_type == .esm) { - break :brk ResolvedSource.Tag.package_json_type_module; - } - } - - break :brk ResolvedSource.Tag.javascript; - }, - }; - } - - const start_count = jsc_vm.transpiler.linker.import_counter; - - // We _must_ link because: - // - node_modules bundle won't be properly - try jsc_vm.transpiler.linker.link( - path, - &parse_result, - jsc_vm.origin, - .absolute_path, - false, - true, - ); - - if (parse_result.pending_imports.len > 0) { - if (promise_ptr == null) { - return error.UnexpectedPendingResolution; - } - - if (parse_result.source.contents_is_recycled) { - // this shared buffer is about to become owned by the AsyncModule struct - jsc_vm.transpiler.resolver.caches.fs.resetSharedBuffer( - jsc_vm.transpiler.resolver.caches.fs.sharedBuffer(), - ); - } - - jsc_vm.modules.enqueue( - globalObject.?, - .{ - .parse_result = parse_result, - .path = path, - .loader = loader, - .fd = fd, - .package_json = package_json, - .hash = hash, - .promise_ptr = promise_ptr, - .specifier = specifier, - .referrer = referrer, - .arena = arena, - }, - ); - give_back_arena = false; - return error.AsyncModule; - } - - if (!jsc_vm.macro_mode) - jsc_vm.resolved_count += jsc_vm.transpiler.linker.import_counter - start_count; - jsc_vm.transpiler.linker.import_counter = 0; - - var printer = source_code_printer.*; - printer.ctx.reset(); - defer source_code_printer.* = printer; - _ = brk: { - var mapper = jsc_vm.sourceMapHandler(&printer); - - break :brk try jsc_vm.transpiler.printWithSourceMap( - parse_result, - @TypeOf(&printer), - &printer, - .esm_ascii, - mapper.get(), - ); - }; - - if (comptime Environment.dump_source) { - dumpSource(jsc_vm, specifier, &printer); - } - - defer { - if (is_main) { - jsc_vm.has_loaded = true; - } - } - - if (jsc_vm.isWatcherEnabled()) { - var resolved_source = jsc_vm.refCountedResolvedSource(printer.ctx.written, input_specifier, path.text, null, false); - resolved_source.is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs; - return resolved_source; - } - - // Pass along package.json type "module" if set. 
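// --- Editor's sketch (not part of the diff) ---------------------------------
// How the package.json "type" field feeds into the tag switch that follows.
// `pickTag` and the *Sketch enums are hypothetical helpers for illustration;
// the member names mirror the ones used below.
const ModuleTypeSketch = enum { unknown, cjs, esm };
const TagSketch = enum { javascript, package_json_type_module, package_json_type_commonjs };

fn pickTag(package_json_module_type: ?ModuleTypeSketch, inferred_module_type: ModuleTypeSketch) TagSketch {
    // The nearest package.json wins over whatever was inferred from the file
    // extension or contents; "type": "module" yields the ESM wrapper tag.
    const effective = package_json_module_type orelse inferred_module_type;
    return switch (effective) {
        .esm => .package_json_type_module,
        .cjs => .package_json_type_commonjs,
        .unknown => .javascript,
    };
}
// -----------------------------------------------------------------------------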
- const tag: ResolvedSource.Tag = switch (loader) { - .json, .jsonc => .json_for_object_loader, - .js, .jsx, .ts, .tsx => brk: { - const module_type_ = if (package_json) |pkg| pkg.module_type else module_type; - - break :brk switch (module_type_) { - .esm => .package_json_type_module, - .cjs => .package_json_type_commonjs, - else => .javascript, - }; - }, - else => .javascript, - }; - - return .{ - .allocator = null, - .source_code = brk: { - const written = printer.ctx.getWritten(); - const result = cache.output_code orelse bun.String.createLatin1(written); - - if (written.len > 1024 * 1024 * 2 or jsc_vm.smol) { - printer.ctx.buffer.deinit(); - } - - break :brk result; - }, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .is_commonjs_module = parse_result.ast.has_commonjs_export_names or parse_result.ast.exports_kind == .cjs, - .tag = tag, - }; - }, - // provideFetch() should be called - .napi => unreachable, - // .wasm => { - // jsc_vm.transpiled_count += 1; - // var fd: ?StoredFileDescriptorType = null; - - // var allocator = if (jsc_vm.has_loaded) jsc_vm.arena.allocator() else jsc_vm.allocator; - - // const hash = http.Watcher.getHash(path.text); - // if (jsc_vm.watcher) |watcher| { - // if (watcher.indexOf(hash)) |index| { - // const _fd = watcher.watchlist().items(.fd)[index]; - // fd = if (_fd > 0) _fd else null; - // } - // } - - // var parse_options = Transpiler.ParseOptions{ - // .allocator = allocator, - // .path = path, - // .loader = loader, - // .dirname_fd = 0, - // .file_descriptor = fd, - // .file_hash = hash, - // .macro_remappings = MacroRemap{}, - // .jsx = jsc_vm.transpiler.options.jsx, - // }; - - // var parse_result = jsc_vm.transpiler.parse( - // parse_options, - // null, - // ) orelse { - // return error.ParseError; - // }; - - // return ResolvedSource{ - // .allocator = if (jsc_vm.has_loaded) &jsc_vm.allocator else null, - // .source_code = ZigString.init(jsc_vm.allocator.dupe(u8, parse_result.source.contents) catch unreachable), - // .specifier = ZigString.init(specifier), - // .source_url = input_specifier.createIfDifferent(path.text), - // .tag = ResolvedSource.Tag.wasm, - // }; - // }, - .wasm => { - if (strings.eqlComptime(referrer, "undefined") and strings.eqlLong(jsc_vm.main, path.text, true)) { - if (virtual_source) |source| { - if (globalObject) |globalThis| { - // attempt to avoid reading the WASM file twice. 
- const encoded = JSC.EncodedJSValue{ - .asPtr = globalThis, - }; - const globalValue = @as(JSValue, @enumFromInt(encoded.asInt64)); - globalValue.put( - globalThis, - ZigString.static("wasmSourceBytes"), - JSC.ArrayBuffer.create(globalThis, source.contents, .Uint8Array), - ); - } - } - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.static(@embedFile("../js/wasi-runner.js")), - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .tag = .esm, - }; - } - - return transpileSourceCode( - jsc_vm, - specifier, - referrer, - input_specifier, - path, - .file, - .unknown, // cjs/esm don't make sense for wasm - log, - virtual_source, - promise_ptr, - source_code_printer, - globalObject, - flags, - ); - }, - - .sqlite_embedded, .sqlite => { - const sqlite_module_source_code_string = brk: { - if (jsc_vm.hot_reload == .hot) { - break :brk - \\// Generated code - \\import {Database} from 'bun:sqlite'; - \\const {path} = import.meta; - \\ - \\// Don't reload the database if it's already loaded - \\const registry = (globalThis[Symbol.for("bun:sqlite:hot")] ??= new Map()); - \\ - \\export let db = registry.get(path); - \\export const __esModule = true; - \\if (!db) { - \\ // Load the database - \\ db = new Database(path); - \\ registry.set(path, db); - \\} - \\ - \\export default db; - ; - } - - break :brk - \\// Generated code - \\import {Database} from 'bun:sqlite'; - \\export const db = new Database(import.meta.path); - \\ - \\export const __esModule = true; - \\export default db; - ; - }; - - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.createUTF8(sqlite_module_source_code_string), - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .tag = .esm, - }; - }, - - .html => { - if (flags.disableTranspiling()) { - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.empty, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .tag = .esm, - }; - } - - if (globalObject == null) { - return error.NotSupported; - } - - const html_bundle = try JSC.API.HTMLBundle.init(globalObject.?, path.text); - return ResolvedSource{ - .allocator = &jsc_vm.allocator, - .jsvalue_for_export = html_bundle.toJS(globalObject.?), - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .tag = .export_default_object, - }; - }, - - else => { - if (flags.disableTranspiling()) { - return ResolvedSource{ - .allocator = null, - .source_code = bun.String.empty, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .tag = .esm, - }; - } - - if (virtual_source == null) { - if (jsc_vm.isWatcherEnabled()) auto_watch: { - if (std.fs.path.isAbsolute(path.text) and !strings.contains(path.text, "node_modules")) { - const input_fd: bun.StoredFileDescriptorType = brk: { - // on macOS, we need a file descriptor to receive event notifications on it. - // so we use O_EVTONLY to open the file descriptor without asking any additional permissions. - if (bun.Watcher.requires_file_descriptors) { - switch (bun.sys.open( - &(std.posix.toPosixPath(path.text) catch break :auto_watch), - bun.c.O_EVTONLY, - 0, - )) { - .err => break :auto_watch, - .result => |fd| break :brk fd, - } - } else { - // Otherwise, don't even bother opening it. 
- break :brk .invalid; - } - }; - const hash = bun.Watcher.getHash(path.text); - switch (jsc_vm.bun_watcher.addFile( - input_fd, - path.text, - hash, - loader, - .invalid, - null, - true, - )) { - .err => { - if (comptime Environment.isMac) { - // If any error occurs and we just - // opened the file descriptor to - // receive event notifications on - // it, we should close it. - if (input_fd.isValid()) { - input_fd.close(); - } - } - - // we don't consider it a failure if we cannot watch the file - // they didn't open the file - }, - .result => {}, - } - } - } - } - - const value = brk: { - if (!jsc_vm.origin.isEmpty()) { - var buf = MutableString.init2048(jsc_vm.allocator) catch bun.outOfMemory(); - defer buf.deinit(); - var writer = buf.writer(); - JSC.API.Bun.getPublicPath(specifier, jsc_vm.origin, @TypeOf(&writer), &writer); - break :brk bun.String.createUTF8ForJS(globalObject.?, buf.slice()); - } - - break :brk bun.String.createUTF8ForJS(globalObject.?, path.text); - }; - - return ResolvedSource{ - .allocator = null, - .jsvalue_for_export = value, - .specifier = input_specifier, - .source_url = input_specifier.createIfDifferent(path.text), - .tag = .export_default_object, - }; - }, - } - } - - pub export fn Bun__resolveAndFetchBuiltinModule( - jsc_vm: *VirtualMachine, - specifier: *bun.String, - ret: *JSC.ErrorableResolvedSource, - ) bool { - JSC.markBinding(@src()); - var log = logger.Log.init(jsc_vm.transpiler.allocator); - defer log.deinit(); - - const alias = HardcodedModule.Alias.bun_aliases.getWithEql(specifier.*, bun.String.eqlComptime) orelse - return false; - const hardcoded = HardcodedModule.map.get(alias.path) orelse { - bun.debugAssert(false); - return false; - }; - ret.* = .ok( - getHardcodedModule(jsc_vm, specifier.*, hardcoded) orelse - return false, - ); - return true; - } - - pub export fn Bun__fetchBuiltinModule( - jsc_vm: *VirtualMachine, - globalObject: *JSGlobalObject, - specifier: *bun.String, - referrer: *bun.String, - ret: *JSC.ErrorableResolvedSource, - ) bool { - JSC.markBinding(@src()); - var log = logger.Log.init(jsc_vm.transpiler.allocator); - defer log.deinit(); - - if (ModuleLoader.fetchBuiltinModule( - jsc_vm, - specifier.*, - ) catch |err| { - if (err == error.AsyncModule) { - unreachable; - } - - VirtualMachine.processFetchLog(globalObject, specifier.*, referrer.*, &log, ret, err); - return true; - }) |builtin| { - ret.* = JSC.ErrorableResolvedSource.ok(builtin); - return true; - } else { - return false; - } - } - - const always_sync_modules = .{"reflect-metadata"}; - - pub export fn Bun__transpileFile( - jsc_vm: *VirtualMachine, - globalObject: *JSGlobalObject, - specifier_ptr: *bun.String, - referrer: *bun.String, - type_attribute: ?*const bun.String, - ret: *JSC.ErrorableResolvedSource, - allow_promise: bool, - is_commonjs_require: bool, - force_loader_type: bun.options.Loader.Optional, - ) ?*anyopaque { - JSC.markBinding(@src()); - var log = logger.Log.init(jsc_vm.transpiler.allocator); - defer log.deinit(); - - var _specifier = specifier_ptr.toUTF8(jsc_vm.allocator); - var referrer_slice = referrer.toUTF8(jsc_vm.allocator); - defer _specifier.deinit(); - defer referrer_slice.deinit(); - - var type_attribute_str: ?string = null; - if (type_attribute) |attribute| if (attribute.asUTF8()) |attr_utf8| { - type_attribute_str = attr_utf8; - }; - - var virtual_source_to_use: ?logger.Source = null; - var blob_to_deinit: ?JSC.WebCore.Blob = null; - var lr = options.getLoaderAndVirtualSource(_specifier.slice(), jsc_vm, &virtual_source_to_use, 
&blob_to_deinit, type_attribute_str) catch { - ret.* = JSC.ErrorableResolvedSource.err(error.JSErrorObject, globalObject.ERR(.MODULE_NOT_FOUND, "Blob not found", .{}).toJS().asVoid()); - return null; - }; - defer if (blob_to_deinit) |*blob| blob.deinit(); - - if (force_loader_type.unwrap()) |loader_type| { - @branchHint(.unlikely); - bun.assert(!is_commonjs_require); - lr.loader = loader_type; - } else if (is_commonjs_require and jsc_vm.has_mutated_built_in_extensions > 0) { - @branchHint(.unlikely); - if (node_module_module.findLongestRegisteredExtension(jsc_vm, _specifier.slice())) |entry| { - switch (entry) { - .loader => |loader| { - lr.loader = loader; - }, - .custom => |index| { - ret.* = JSC.ErrorableResolvedSource.ok(ResolvedSource{ - .allocator = null, - .source_code = bun.String.empty, - .specifier = .empty, - .source_url = .empty, - .cjs_custom_extension_index = index, - .tag = .common_js_custom_extension, - }); - return null; - }, - } - } - } - - const module_type: options.ModuleType = brk: { - const ext = lr.path.name.ext; - // regular expression /.[cm][jt]s$/ - if (ext.len == ".cjs".len) { - if (strings.eqlComptimeIgnoreLen(ext, ".cjs")) - break :brk .cjs; - if (strings.eqlComptimeIgnoreLen(ext, ".mjs")) - break :brk .esm; - if (strings.eqlComptimeIgnoreLen(ext, ".cts")) - break :brk .cjs; - if (strings.eqlComptimeIgnoreLen(ext, ".mts")) - break :brk .esm; - } - // regular expression /.[jt]s$/ - if (ext.len == ".ts".len) { - if (strings.eqlComptimeIgnoreLen(ext, ".js") or - strings.eqlComptimeIgnoreLen(ext, ".ts")) - { - // Use the package.json module type if it exists - break :brk if (lr.package_json) |pkg| - pkg.module_type - else - .unknown; - } - } - // For JSX TSX and other extensions, let the file contents. - break :brk .unknown; - }; - const pkg_name: ?[]const u8 = if (lr.package_json) |pkg| - if (pkg.name.len > 0) pkg.name else null - else - null; - - // We only run the transpiler concurrently when we can. - // Today, that's: - // - // Import Statements (import 'foo') - // Import Expressions (import('foo')) - // - transpile_async: { - if (comptime bun.FeatureFlags.concurrent_transpiler) { - const concurrent_loader = lr.loader orelse .file; - if (blob_to_deinit == null and - allow_promise and - (jsc_vm.has_loaded or jsc_vm.is_in_preload) and - concurrent_loader.isJavaScriptLike() and - !lr.is_main and - // Plugins make this complicated, - // TODO: allow running concurrently when no onLoad handlers match a plugin. - jsc_vm.plugin_runner == null and jsc_vm.transpiler_store.enabled) - { - // This absolutely disgusting hack is a workaround in cases - // where an async import is made to a CJS file with side - // effects that other modules depend on, without incurring - // the cost of transpiling/loading CJS modules synchronously. - // - // The cause of this comes from the fact that we immediately - // and synchronously evaluate CJS modules after they've been - // transpiled, but transpiling (which, for async imports, - // happens in a thread pool), can resolve in whatever order. - // This messes up module execution order. - // - // This is only _really_ important for - // import("some-polyfill") cases, the most impactful of - // which is `reflect-metadata`. People could also use - // require or just preload their polyfills, but they aren't - // doing this. This hack makes important polyfills work without - // incurring the cost of transpiling/loading CJS modules - // synchronously. The proper fix is to evaluate CJS modules - // at the same time as ES modules. 
This is blocked by the - // fact that we need exports from CJS modules and our parser - // doesn't record them. - if (pkg_name) |pkg_name_| { - inline for (always_sync_modules) |always_sync_specifier| { - if (bun.strings.eqlComptime(pkg_name_, always_sync_specifier)) { - break :transpile_async; - } - } - } - - // TODO: check if the resolved source must be transpiled synchronously - return jsc_vm.transpiler_store.transpile( - jsc_vm, - globalObject, - specifier_ptr.dupeRef(), - lr.path, - referrer.dupeRef(), - concurrent_loader, - lr.package_json, - ); - } - } - } - - const synchronous_loader: options.Loader = lr.loader orelse loader: { - if (jsc_vm.has_loaded or jsc_vm.is_in_preload) { - // Extensionless files in this context are treated as the JS loader - if (lr.path.name.ext.len == 0) { - break :loader .tsx; - } - - // Unknown extensions are to be treated as file loader - if (is_commonjs_require) { - if (jsc_vm.commonjs_custom_extensions.entries.len > 0 and - jsc_vm.has_mutated_built_in_extensions == 0) - { - @branchHint(.unlikely); - if (node_module_module.findLongestRegisteredExtension(jsc_vm, lr.path.text)) |entry| { - switch (entry) { - .loader => |loader| break :loader loader, - .custom => |index| { - ret.* = JSC.ErrorableResolvedSource.ok(ResolvedSource{ - .allocator = null, - .source_code = bun.String.empty, - .specifier = .empty, - .source_url = .empty, - .cjs_custom_extension_index = index, - .tag = .common_js_custom_extension, - }); - return null; - }, - } - } - } - - // For Node.js compatibility, requiring a file with an - // unknown extension will be treated as a JS file - break :loader .ts; - } - - // For ESM, Bun treats unknown extensions as file loader - break :loader .file; - } else { - // Unless it's potentially the main module - // This is important so that "bun run ./foo-i-have-no-extension" works - break :loader .tsx; - } - }; - - if (comptime Environment.allow_assert) - debug("transpile({s}, {s}, sync)", .{ lr.specifier, @tagName(synchronous_loader) }); - - defer jsc_vm.module_loader.resetArena(jsc_vm); - - var promise: ?*JSC.JSInternalPromise = null; - ret.* = JSC.ErrorableResolvedSource.ok( - ModuleLoader.transpileSourceCode( - jsc_vm, - lr.specifier, - referrer_slice.slice(), - specifier_ptr.*, - lr.path, - synchronous_loader, - module_type, - &log, - lr.virtual_source, - if (allow_promise) &promise else null, - VirtualMachine.source_code_printer.?, - globalObject, - FetchFlags.transpile, - ) catch |err| { - if (err == error.AsyncModule) { - bun.assert(promise != null); - return promise; - } - - if (err == error.PluginError) { - return null; - } - - VirtualMachine.processFetchLog(globalObject, specifier_ptr.*, referrer.*, &log, ret, err); - return null; - }, - ); - return promise; - } - - export fn Bun__runVirtualModule(globalObject: *JSGlobalObject, specifier_ptr: *const bun.String) JSValue { - JSC.markBinding(@src()); - if (globalObject.bunVM().plugin_runner == null) return JSValue.zero; - - const specifier_slice = specifier_ptr.toUTF8(bun.default_allocator); - defer specifier_slice.deinit(); - const specifier = specifier_slice.slice(); - - if (!PluginRunner.couldBePlugin(specifier)) { - return JSValue.zero; - } - - const namespace = PluginRunner.extractNamespace(specifier); - const after_namespace = if (namespace.len == 0) - specifier - else - specifier[@min(namespace.len + 1, specifier.len)..]; - - return globalObject.runOnLoadPlugins(bun.String.init(namespace), bun.String.init(after_namespace), .bun) orelse - return JSValue.zero; - } - - fn 
getHardcodedModule(jsc_vm: *VirtualMachine, specifier: bun.String, hardcoded: HardcodedModule) ?ResolvedSource { - Analytics.Features.builtin_modules.insert(hardcoded); - return switch (hardcoded) { - .@"bun:main" => .{ - .allocator = null, - .source_code = bun.String.createUTF8(jsc_vm.entry_point.source.contents), - .specifier = specifier, - .source_url = specifier, - .tag = .esm, - .source_code_needs_deref = true, - }, - .@"bun:internal-for-testing" => { - if (!Environment.isDebug) { - if (!is_allowed_to_use_internal_testing_apis) - return null; - } - return jsSyntheticModule(.@"bun:internal-for-testing", specifier); - }, - .@"bun:wrap" => .{ - .allocator = null, - .source_code = String.init(Runtime.Runtime.sourceCode()), - .specifier = specifier, - .source_url = specifier, - }, - inline else => |tag| jsSyntheticModule(@field(ResolvedSource.Tag, @tagName(tag)), specifier), - }; - } - - pub fn fetchBuiltinModule(jsc_vm: *VirtualMachine, specifier: bun.String) !?ResolvedSource { - if (HardcodedModule.map.getWithEql(specifier, bun.String.eqlComptime)) |hardcoded| { - return getHardcodedModule(jsc_vm, specifier, hardcoded); - } - - if (specifier.hasPrefixComptime(js_ast.Macro.namespaceWithColon)) { - const spec = specifier.toUTF8(bun.default_allocator); - defer spec.deinit(); - if (jsc_vm.macro_entry_points.get(MacroEntryPoint.generateIDFromSpecifier(spec.slice()))) |entry| { - return .{ - .allocator = null, - .source_code = bun.String.createUTF8(entry.source.contents), - .specifier = specifier, - .source_url = specifier.dupeRef(), - }; - } - } else if (jsc_vm.standalone_module_graph) |graph| { - const specifier_utf8 = specifier.toUTF8(bun.default_allocator); - defer specifier_utf8.deinit(); - if (graph.files.getPtr(specifier_utf8.slice())) |file| { - if (file.loader == .sqlite or file.loader == .sqlite_embedded) { - const code = - \\/* Generated code */ - \\import {Database} from 'bun:sqlite'; - \\import {readFileSync} from 'node:fs'; - \\export const db = new Database(readFileSync(import.meta.path)); - \\ - \\export const __esModule = true; - \\export default db; - ; - return .{ - .allocator = null, - .source_code = bun.String.static(code), - .specifier = specifier, - .source_url = specifier.dupeRef(), - .source_code_needs_deref = false, - }; - } - - return .{ - .allocator = null, - .source_code = file.toWTFString(), - .specifier = specifier, - .source_url = specifier.dupeRef(), - .source_code_needs_deref = false, - .bytecode_cache = if (file.bytecode.len > 0) file.bytecode.ptr else null, - .bytecode_cache_size = file.bytecode.len, - .is_commonjs_module = file.module_format == .cjs, - }; - } - } - - return null; - } - - export fn Bun__transpileVirtualModule( - globalObject: *JSGlobalObject, - specifier_ptr: *const bun.String, - referrer_ptr: *const bun.String, - source_code: *ZigString, - loader_: Api.Loader, - ret: *JSC.ErrorableResolvedSource, - ) bool { - JSC.markBinding(@src()); - const jsc_vm = globalObject.bunVM(); - bun.assert(jsc_vm.plugin_runner != null); - - var specifier_slice = specifier_ptr.toUTF8(jsc_vm.allocator); - const specifier = specifier_slice.slice(); - defer specifier_slice.deinit(); - var source_code_slice = source_code.toSlice(jsc_vm.allocator); - defer source_code_slice.deinit(); - var referrer_slice = referrer_ptr.toUTF8(jsc_vm.allocator); - defer referrer_slice.deinit(); - - var virtual_source = logger.Source.initPathString(specifier, source_code_slice.slice()); - var log = logger.Log.init(jsc_vm.allocator); - const path = Fs.Path.init(specifier); - - const 
loader = if (loader_ != ._none) - options.Loader.fromAPI(loader_) - else - jsc_vm.transpiler.options.loaders.get(path.name.ext) orelse brk: { - if (strings.eqlLong(specifier, jsc_vm.main, true)) { - break :brk options.Loader.js; - } - - break :brk options.Loader.file; - }; - - defer log.deinit(); - defer jsc_vm.module_loader.resetArena(jsc_vm); - - ret.* = JSC.ErrorableResolvedSource.ok( - ModuleLoader.transpileSourceCode( - jsc_vm, - specifier_slice.slice(), - referrer_slice.slice(), - specifier_ptr.*, - path, - loader, - .unknown, - &log, - &virtual_source, - null, - VirtualMachine.source_code_printer.?, - globalObject, - FetchFlags.transpile, - ) catch |err| { - if (err == error.PluginError) { - return true; - } - VirtualMachine.processFetchLog(globalObject, specifier_ptr.*, referrer_ptr.*, &log, ret, err); - return true; - }, - ); - Analytics.Features.virtual_modules += 1; - return true; - } - - comptime { - _ = Bun__transpileVirtualModule; - _ = Bun__runVirtualModule; - _ = Bun__transpileFile; - _ = Bun__fetchBuiltinModule; - _ = Bun__getDefaultLoader; - } -}; - -pub const FetchFlags = enum { - transpile, - print_source, - print_source_and_clone, - - pub fn disableTranspiling(this: FetchFlags) bool { - return this != .transpile; - } -}; - -const SavedSourceMap = JSC.SavedSourceMap; - -pub const HardcodedModule = enum { - bun, - @"abort-controller", - @"bun:ffi", - @"bun:jsc", - @"bun:main", - @"bun:test", // usually replaced by the transpiler but `await import("bun:" + "test")` has to work - @"bun:wrap", - @"bun:sqlite", - @"node:assert", - @"node:assert/strict", - @"node:async_hooks", - @"node:buffer", - @"node:child_process", - @"node:console", - @"node:constants", - @"node:crypto", - @"node:dns", - @"node:dns/promises", - @"node:domain", - @"node:events", - @"node:fs", - @"node:fs/promises", - @"node:http", - @"node:https", - @"node:module", - @"node:net", - @"node:os", - @"node:path", - @"node:path/posix", - @"node:path/win32", - @"node:perf_hooks", - @"node:process", - @"node:querystring", - @"node:readline", - @"node:readline/promises", - @"node:stream", - @"node:stream/consumers", - @"node:stream/promises", - @"node:stream/web", - @"node:string_decoder", - @"node:test", - @"node:timers", - @"node:timers/promises", - @"node:tls", - @"node:tty", - @"node:url", - @"node:util", - @"node:util/types", - @"node:vm", - @"node:wasi", - @"node:zlib", - @"node:worker_threads", - @"node:punycode", - undici, - ws, - @"isomorphic-fetch", - @"node-fetch", - vercel_fetch, - @"utf-8-validate", - @"node:v8", - @"node:trace_events", - @"node:repl", - @"node:inspector", - @"node:http2", - @"node:diagnostics_channel", - @"node:dgram", - @"node:cluster", - @"node:_stream_duplex", - @"node:_stream_passthrough", - @"node:_stream_readable", - @"node:_stream_transform", - @"node:_stream_wrap", - @"node:_stream_writable", - @"node:_tls_common", - /// This is gated behind '--expose-internals' - @"bun:internal-for-testing", - - /// The module loader first uses `Aliases` to get a single string during - /// resolution, then maps that single string to the actual module. - /// Do not include aliases here; Those go in `Aliases`. 
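// --- Editor's sketch (not part of the diff) ---------------------------------
// The two-step resolution described in the doc comment above, distilled.
// `resolveBuiltin` is a hypothetical helper; it combines `Alias.get` with the
// `map` declared just below.
fn resolveBuiltin(specifier: []const u8, target: options.Target) ?HardcodedModule {
    // Step 1: canonicalize through the alias table, e.g.
    //   "util"                  -> "node:util"
    //   "next/dist/compiled/ws" -> "ws"
    // Non-builtins return null here and fall through to normal resolution.
    const alias = HardcodedModule.Alias.get(specifier, target) orelse return null;
    // Step 2: map the single canonical string to the enum the loader switches on.
    return HardcodedModule.map.get(alias.path);
}
// -----------------------------------------------------------------------------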
- pub const map = bun.ComptimeStringMap(HardcodedModule, [_]struct { []const u8, HardcodedModule }{ - // Bun - .{ "bun", .bun }, - .{ "bun:ffi", .@"bun:ffi" }, - .{ "bun:jsc", .@"bun:jsc" }, - .{ "bun:main", .@"bun:main" }, - .{ "bun:test", .@"bun:test" }, - .{ "bun:sqlite", .@"bun:sqlite" }, - .{ "bun:wrap", .@"bun:wrap" }, - .{ "bun:internal-for-testing", .@"bun:internal-for-testing" }, - // Node.js - .{ "node:assert", .@"node:assert" }, - .{ "node:assert/strict", .@"node:assert/strict" }, - .{ "node:async_hooks", .@"node:async_hooks" }, - .{ "node:buffer", .@"node:buffer" }, - .{ "node:child_process", .@"node:child_process" }, - .{ "node:cluster", .@"node:cluster" }, - .{ "node:console", .@"node:console" }, - .{ "node:constants", .@"node:constants" }, - .{ "node:crypto", .@"node:crypto" }, - .{ "node:dgram", .@"node:dgram" }, - .{ "node:diagnostics_channel", .@"node:diagnostics_channel" }, - .{ "node:dns", .@"node:dns" }, - .{ "node:dns/promises", .@"node:dns/promises" }, - .{ "node:domain", .@"node:domain" }, - .{ "node:events", .@"node:events" }, - .{ "node:fs", .@"node:fs" }, - .{ "node:fs/promises", .@"node:fs/promises" }, - .{ "node:http", .@"node:http" }, - .{ "node:http2", .@"node:http2" }, - .{ "node:https", .@"node:https" }, - .{ "node:inspector", .@"node:inspector" }, - .{ "node:module", .@"node:module" }, - .{ "node:net", .@"node:net" }, - .{ "node:readline", .@"node:readline" }, - .{ "node:test", .@"node:test" }, - .{ "node:os", .@"node:os" }, - .{ "node:path", .@"node:path" }, - .{ "node:path/posix", .@"node:path/posix" }, - .{ "node:path/win32", .@"node:path/win32" }, - .{ "node:perf_hooks", .@"node:perf_hooks" }, - .{ "node:process", .@"node:process" }, - .{ "node:punycode", .@"node:punycode" }, - .{ "node:querystring", .@"node:querystring" }, - .{ "node:readline", .@"node:readline" }, - .{ "node:readline/promises", .@"node:readline/promises" }, - .{ "node:repl", .@"node:repl" }, - .{ "node:stream", .@"node:stream" }, - .{ "node:stream/consumers", .@"node:stream/consumers" }, - .{ "node:stream/promises", .@"node:stream/promises" }, - .{ "node:stream/web", .@"node:stream/web" }, - .{ "node:string_decoder", .@"node:string_decoder" }, - .{ "node:timers", .@"node:timers" }, - .{ "node:timers/promises", .@"node:timers/promises" }, - .{ "node:tls", .@"node:tls" }, - .{ "node:trace_events", .@"node:trace_events" }, - .{ "node:tty", .@"node:tty" }, - .{ "node:url", .@"node:url" }, - .{ "node:util", .@"node:util" }, - .{ "node:util/types", .@"node:util/types" }, - .{ "node:v8", .@"node:v8" }, - .{ "node:vm", .@"node:vm" }, - .{ "node:wasi", .@"node:wasi" }, - .{ "node:worker_threads", .@"node:worker_threads" }, - .{ "node:zlib", .@"node:zlib" }, - .{ "node:_stream_duplex", .@"node:_stream_duplex" }, - .{ "node:_stream_passthrough", .@"node:_stream_passthrough" }, - .{ "node:_stream_readable", .@"node:_stream_readable" }, - .{ "node:_stream_transform", .@"node:_stream_transform" }, - .{ "node:_stream_wrap", .@"node:_stream_wrap" }, - .{ "node:_stream_writable", .@"node:_stream_writable" }, - .{ "node:_tls_common", .@"node:_tls_common" }, - - .{ "node-fetch", HardcodedModule.@"node-fetch" }, - .{ "isomorphic-fetch", HardcodedModule.@"isomorphic-fetch" }, - .{ "undici", HardcodedModule.undici }, - .{ "ws", HardcodedModule.ws }, - .{ "@vercel/fetch", HardcodedModule.vercel_fetch }, - .{ "utf-8-validate", HardcodedModule.@"utf-8-validate" }, - .{ "abort-controller", HardcodedModule.@"abort-controller" }, - }); - - /// Contains the list of built-in modules from the perspective of the 
module - /// loader. This logic is duplicated for `isBuiltinModule` and the like. - pub const Alias = struct { - path: [:0]const u8, - tag: ImportRecord.Tag = .builtin, - node_builtin: bool = false, - node_only_prefix: bool = false, - - fn nodeEntry(path: [:0]const u8) struct { string, Alias } { - return .{ - path, - .{ - .path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path, - .node_builtin = true, - }, - }; - } - fn nodeEntryOnlyPrefix(path: [:0]const u8) struct { string, Alias } { - return .{ - path, - .{ - .path = if (path.len > 5 and std.mem.eql(u8, path[0..5], "node:")) path else "node:" ++ path, - .node_builtin = true, - .node_only_prefix = true, - }, - }; - } - fn entry(path: [:0]const u8) struct { string, Alias } { - return .{ path, .{ .path = path } }; - } - - // Applied to both --target=bun and --target=node - const common_alias_kvs = [_]struct { string, Alias }{ - nodeEntry("node:assert"), - nodeEntry("node:assert/strict"), - nodeEntry("node:async_hooks"), - nodeEntry("node:buffer"), - nodeEntry("node:child_process"), - nodeEntry("node:cluster"), - nodeEntry("node:console"), - nodeEntry("node:constants"), - nodeEntry("node:crypto"), - nodeEntry("node:dgram"), - nodeEntry("node:diagnostics_channel"), - nodeEntry("node:dns"), - nodeEntry("node:dns/promises"), - nodeEntry("node:domain"), - nodeEntry("node:events"), - nodeEntry("node:fs"), - nodeEntry("node:fs/promises"), - nodeEntry("node:http"), - nodeEntry("node:http2"), - nodeEntry("node:https"), - nodeEntry("node:inspector"), - nodeEntry("node:module"), - nodeEntry("node:net"), - nodeEntry("node:os"), - nodeEntry("node:path"), - nodeEntry("node:path/posix"), - nodeEntry("node:path/win32"), - nodeEntry("node:perf_hooks"), - nodeEntry("node:process"), - nodeEntry("node:punycode"), - nodeEntry("node:querystring"), - nodeEntry("node:readline"), - nodeEntry("node:readline/promises"), - nodeEntry("node:repl"), - nodeEntry("node:stream"), - nodeEntry("node:stream/consumers"), - nodeEntry("node:stream/promises"), - nodeEntry("node:stream/web"), - nodeEntry("node:string_decoder"), - nodeEntry("node:timers"), - nodeEntry("node:timers/promises"), - nodeEntry("node:tls"), - nodeEntry("node:trace_events"), - nodeEntry("node:tty"), - nodeEntry("node:url"), - nodeEntry("node:util"), - nodeEntry("node:util/types"), - nodeEntry("node:v8"), - nodeEntry("node:vm"), - nodeEntry("node:wasi"), - nodeEntry("node:worker_threads"), - nodeEntry("node:zlib"), - // New Node.js builtins only resolve from the prefixed one. 
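// --- Editor's sketch (not part of the diff) ---------------------------------
// What `node_only_prefix` means in practice for the node:test entry that
// follows. `isBuiltinSpecifier` is a hypothetical helper.
fn isBuiltinSpecifier(specifier: []const u8) bool {
    return HardcodedModule.Alias.get(specifier, .bun) != null;
}
// isBuiltinSpecifier("node:test") == true  (added via nodeEntryOnlyPrefix below)
// isBuiltinSpecifier("test")      == false (no bare alias is registered, so
//                                           "test" resolves from node_modules,
//                                           matching Node.js behaviour)
// -----------------------------------------------------------------------------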
- nodeEntryOnlyPrefix("node:test"), - - nodeEntry("assert"), - nodeEntry("assert/strict"), - nodeEntry("async_hooks"), - nodeEntry("buffer"), - nodeEntry("child_process"), - nodeEntry("cluster"), - nodeEntry("console"), - nodeEntry("constants"), - nodeEntry("crypto"), - nodeEntry("dgram"), - nodeEntry("diagnostics_channel"), - nodeEntry("dns"), - nodeEntry("dns/promises"), - nodeEntry("domain"), - nodeEntry("events"), - nodeEntry("fs"), - nodeEntry("fs/promises"), - nodeEntry("http"), - nodeEntry("http2"), - nodeEntry("https"), - nodeEntry("inspector"), - nodeEntry("module"), - nodeEntry("net"), - nodeEntry("os"), - nodeEntry("path"), - nodeEntry("path/posix"), - nodeEntry("path/win32"), - nodeEntry("perf_hooks"), - nodeEntry("process"), - nodeEntry("punycode"), - nodeEntry("querystring"), - nodeEntry("readline"), - nodeEntry("readline/promises"), - nodeEntry("repl"), - nodeEntry("stream"), - nodeEntry("stream/consumers"), - nodeEntry("stream/promises"), - nodeEntry("stream/web"), - nodeEntry("string_decoder"), - nodeEntry("timers"), - nodeEntry("timers/promises"), - nodeEntry("tls"), - nodeEntry("trace_events"), - nodeEntry("tty"), - nodeEntry("url"), - nodeEntry("util"), - nodeEntry("util/types"), - nodeEntry("v8"), - nodeEntry("vm"), - nodeEntry("wasi"), - nodeEntry("worker_threads"), - nodeEntry("zlib"), - - // sys is a deprecated alias for util - .{ "sys", .{ .path = "node:util", .node_builtin = true } }, - .{ "node:sys", .{ .path = "node:util", .node_builtin = true } }, - - // These are returned in builtinModules, but probably not many - // packages use them so we will just alias them. - .{ "node:_http_agent", .{ .path = "node:http", .node_builtin = true } }, - .{ "node:_http_client", .{ .path = "node:http", .node_builtin = true } }, - .{ "node:_http_common", .{ .path = "node:http", .node_builtin = true } }, - .{ "node:_http_incoming", .{ .path = "node:http", .node_builtin = true } }, - .{ "node:_http_outgoing", .{ .path = "node:http", .node_builtin = true } }, - .{ "node:_http_server", .{ .path = "node:http", .node_builtin = true } }, - .{ "node:_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } }, - .{ "node:_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } }, - .{ "node:_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } }, - .{ "node:_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = true } }, - .{ "node:_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } }, - .{ "node:_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } }, - .{ "node:_tls_wrap", .{ .path = "node:tls", .node_builtin = true } }, - .{ "node:_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } }, - .{ "_http_agent", .{ .path = "node:http", .node_builtin = true } }, - .{ "_http_client", .{ .path = "node:http", .node_builtin = true } }, - .{ "_http_common", .{ .path = "node:http", .node_builtin = true } }, - .{ "_http_incoming", .{ .path = "node:http", .node_builtin = true } }, - .{ "_http_outgoing", .{ .path = "node:http", .node_builtin = true } }, - .{ "_http_server", .{ .path = "node:http", .node_builtin = true } }, - .{ "_stream_duplex", .{ .path = "node:_stream_duplex", .node_builtin = true } }, - .{ "_stream_passthrough", .{ .path = "node:_stream_passthrough", .node_builtin = true } }, - .{ "_stream_readable", .{ .path = "node:_stream_readable", .node_builtin = true } }, - .{ "_stream_transform", .{ .path = "node:_stream_transform", .node_builtin = 
true } }, - .{ "_stream_wrap", .{ .path = "node:_stream_wrap", .node_builtin = true } }, - .{ "_stream_writable", .{ .path = "node:_stream_writable", .node_builtin = true } }, - .{ "_tls_wrap", .{ .path = "node:tls", .node_builtin = true } }, - .{ "_tls_common", .{ .path = "node:_tls_common", .node_builtin = true } }, - }; - - const bun_extra_alias_kvs = [_]struct { string, Alias }{ - .{ "bun", .{ .path = "bun", .tag = .bun } }, - .{ "bun:test", .{ .path = "bun:test", .tag = .bun_test } }, - .{ "bun:ffi", .{ .path = "bun:ffi" } }, - .{ "bun:jsc", .{ .path = "bun:jsc" } }, - .{ "bun:sqlite", .{ .path = "bun:sqlite" } }, - .{ "bun:wrap", .{ .path = "bun:wrap" } }, - .{ "bun:internal-for-testing", .{ .path = "bun:internal-for-testing" } }, - .{ "ffi", .{ .path = "bun:ffi" } }, - - // inspector/promises is not implemented, it is an alias of inspector - .{ "node:inspector/promises", .{ .path = "node:inspector", .node_builtin = true } }, - .{ "inspector/promises", .{ .path = "node:inspector", .node_builtin = true } }, - - // Thirdparty packages we override - .{ "@vercel/fetch", .{ .path = "@vercel/fetch" } }, - .{ "isomorphic-fetch", .{ .path = "isomorphic-fetch" } }, - .{ "node-fetch", .{ .path = "node-fetch" } }, - .{ "undici", .{ .path = "undici" } }, - .{ "utf-8-validate", .{ .path = "utf-8-validate" } }, - .{ "ws", .{ .path = "ws" } }, - .{ "ws/lib/websocket", .{ .path = "ws" } }, - - // Polyfills we force to native - .{ "abort-controller", .{ .path = "abort-controller" } }, - .{ "abort-controller/polyfill", .{ .path = "abort-controller" } }, - - // To force Next.js to not use bundled dependencies. - .{ "next/dist/compiled/ws", .{ .path = "ws" } }, - .{ "next/dist/compiled/node-fetch", .{ .path = "node-fetch" } }, - .{ "next/dist/compiled/undici", .{ .path = "undici" } }, - }; - - const node_extra_alias_kvs = [_]struct { string, Alias }{ - nodeEntry("node:inspector/promises"), - nodeEntry("inspector/promises"), - }; - - const node_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ node_extra_alias_kvs); - const bun_aliases = bun.ComptimeStringMap(Alias, common_alias_kvs ++ bun_extra_alias_kvs); - - pub fn has(name: []const u8, target: options.Target) bool { - return get(name, target) != null; - } - - pub fn get(name: []const u8, target: options.Target) ?Alias { - if (target.isBun()) { - return bun_aliases.get(name); - } else if (target.isNode()) { - return node_aliases.get(name); - } - return null; - } - }; -}; - -/// Support embedded .node files -export fn Bun__resolveEmbeddedNodeFile(vm: *VirtualMachine, in_out_str: *bun.String) bool { - if (vm.standalone_module_graph == null) return false; - - const input_path = in_out_str.toUTF8(bun.default_allocator); - defer input_path.deinit(); - const result = ModuleLoader.resolveEmbeddedFile(vm, input_path.slice(), "node") orelse return false; - in_out_str.* = bun.String.createUTF8(result); - return true; -} - -export fn ModuleLoader__isBuiltin(data: [*]const u8, len: usize) bool { - const str = data[0..len]; - return HardcodedModule.Alias.bun_aliases.get(str) != null; -} diff --git a/src/bun.js/node.zig b/src/bun.js/node.zig new file mode 100644 index 0000000000..c73c702054 --- /dev/null +++ b/src/bun.js/node.zig @@ -0,0 +1,383 @@ +//! Node.js APIs in Bun. 
Access this namespace with `bun.api.node`
+comptime {
+    _ = process.getTitle;
+    _ = process.setTitle;
+    _ = @import("node/util/parse_args.zig");
+}
+
+/// node:fs
+pub const fs = @import("node/node_fs.zig");
+/// node:path
+pub const path = @import("node/path.zig");
+/// node:crypto
+pub const crypto = @import("node/node_crypto_binding.zig");
+/// node:os
+pub const os = @import("node/node_os.zig");
+/// node:process
+pub const process = @import("node/node_process.zig");
+pub const validators = @import("node/util/validators.zig");
+pub const ErrorCode = @import("node/nodejs_error_code.zig").Code;
+
+pub const Buffer = JSC.MarkedArrayBuffer;
+
+const types = @import("node/types.zig");
+pub const PathOrBlob = types.PathOrBlob;
+pub const Dirent = types.Dirent;
+pub const FileSystemFlags = types.FileSystemFlags;
+pub const PathOrFileDescriptor = types.PathOrFileDescriptor;
+pub const modeFromJS = types.modeFromJS;
+pub const VectorArrayBuffer = types.VectorArrayBuffer;
+pub const Valid = types.Valid;
+pub const PathLike = types.PathLike;
+pub const CallbackTask = types.CallbackTask;
+pub const PathOrBuffer = types.PathOrBuffer;
+pub const jsAssertEncodingValid = types.jsAssertEncodingValid;
+pub const Encoding = types.Encoding;
+pub const StringOrBuffer = types.StringOrBuffer;
+pub const BlobOrStringOrBuffer = types.BlobOrStringOrBuffer;
+
+const stat = @import("node/Stat.zig");
+pub const Stats = stat.Stats;
+pub const StatsBig = stat.StatsBig;
+pub const StatsSmall = stat.StatsSmall;
+
+const statfs = @import("node/StatFS.zig");
+pub const StatFSSmall = statfs.StatFSSmall;
+pub const StatFSBig = statfs.StatFSBig;
+pub const StatFS = statfs.StatFS;
+
+pub const uid_t = if (Environment.isPosix) std.posix.uid_t else bun.windows.libuv.uv_uid_t;
+pub const gid_t = if (Environment.isPosix) std.posix.gid_t else bun.windows.libuv.uv_gid_t;
+
+pub const time_like = @import("node/time_like.zig");
+pub const TimeLike = time_like.TimeLike;
+pub const timeLikeFromJS = time_like.fromJS;
+
+/// Node.js expects the error to include contextual information
+/// - "syscall"
+/// - "path"
+/// - "errno"
+///
+/// We can't really use Zig's error handling for syscalls because Node.js expects the "real" errno to be returned
+/// and various issues with std.posix that make it too unstable for arbitrary user input (e.g. how .BADF is marked as unreachable)
+pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type {
+    // can't call @hasDecl on void, anyerror, etc
+    const has_any_decls = ErrorTypeT != void and ErrorTypeT != anyerror;
+    const has_retry = has_any_decls and @hasDecl(ErrorTypeT, "retry");
+    const has_todo = has_any_decls and @hasDecl(ErrorTypeT, "todo");
+
+    return union(Tag) {
+        pub const ErrorType = ErrorTypeT;
+        pub const ReturnType = ReturnTypeT;
+
+        err: ErrorType,
+        result: ReturnType,
+
+        /// NOTE: this has to have a well defined layout (e.g. setting to `u8`)
+        /// experienced a bug with a Maybe(void, void)
+        /// creating the `err` variant of this type
+        /// resulted in Zig incorrectly setting the tag, leading to a switch
+        /// statement to just not work.
+ /// we (Zack, Dylan, Chloe, Mason) observed that it was set to 0xFF in ReleaseFast in the debugger + pub const Tag = enum(u8) { err, result }; + + pub const retry: @This() = if (has_retry) .{ .err = ErrorType.retry } else .{ .err = .{} }; + pub const success: @This() = .{ + .result = std.mem.zeroes(ReturnType), + }; + /// This value is technically garbage, but that is okay as `.aborted` is + /// only meant to be returned in an operation when there is an aborted + /// `AbortSignal` object associated with the operation. + pub const aborted: @This() = .{ .err = .{ + .errno = @intFromEnum(posix.E.INTR), + .syscall = .access, + } }; + + pub fn assert(this: @This()) ReturnType { + switch (this) { + .err => |err| { + bun.Output.panic("Unexpected error\n{}", .{err}); + }, + .result => |result| return result, + } + } + + pub inline fn todo() @This() { + if (Environment.allow_assert) { + if (comptime ReturnType == void) { + @panic("TODO called!"); + } + @panic(comptime "TODO: Maybe(" ++ bun.meta.typeName(ReturnType) ++ ")"); + } + if (has_todo) { + return .{ .err = ErrorType.todo() }; + } + return .{ .err = ErrorType{} }; + } + + pub fn isTrue(this: @This()) bool { + if (comptime ReturnType != bool) @compileError("This function can only be called on bool"); + return switch (this) { + .result => |r| r, + else => false, + }; + } + + pub fn unwrap(this: @This()) !ReturnType { + return switch (this) { + .result => |r| r, + .err => |e| bun.errnoToZigErr(e.errno), + }; + } + + /// Unwrap the value if it is `result` or use the provided `default_value` + pub inline fn unwrapOr(this: @This(), default_value: ReturnType) ReturnType { + return switch (this) { + .result => |v| v, + .err => default_value, + }; + } + + pub inline fn initErr(e: ErrorType) Maybe(ReturnType, ErrorType) { + return .{ .err = e }; + } + + pub inline fn initErrWithP(e: bun.sys.SystemErrno, syscall: sys.Tag, file_path: anytype) Maybe(ReturnType, ErrorType) { + return .{ .err = .{ + .errno = @intFromEnum(e), + .syscall = syscall, + .path = file_path, + } }; + } + + pub inline fn asErr(this: *const @This()) ?ErrorType { + if (this.* == .err) return this.err; + return null; + } + + pub inline fn asValue(this: *const @This()) ?ReturnType { + if (this.* == .result) return this.result; + return null; + } + + pub inline fn isOk(this: *const @This()) bool { + return switch (this.*) { + .result => true, + .err => false, + }; + } + + pub inline fn isErr(this: *const @This()) bool { + return switch (this.*) { + .result => false, + .err => true, + }; + } + + pub inline fn initResult(result: ReturnType) Maybe(ReturnType, ErrorType) { + return .{ .result = result }; + } + + pub inline fn mapErr(this: @This(), comptime E: type, err_fn: *const fn (ErrorTypeT) E) Maybe(ReturnType, E) { + return switch (this) { + .result => |v| .{ .result = v }, + .err => |e| .{ .err = err_fn(e) }, + }; + } + + pub inline fn toCssResult(this: @This()) Maybe(ReturnType, bun.css.ParseError(bun.css.ParserError)) { + return switch (ErrorTypeT) { + bun.css.BasicParseError => { + return switch (this) { + .result => |v| return .{ .result = v }, + .err => |e| return .{ .err = e.intoDefaultParseError() }, + }; + }, + bun.css.ParseError(bun.css.ParserError) => @compileError("Already a ParseError(ParserError)"), + else => @compileError("Bad!"), + }; + } + + pub fn toJS(this: @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { + return switch (this) { + .result => |r| switch (ReturnType) { + JSC.JSValue => r, + + void => .undefined, + bool => JSC.JSValue.jsBoolean(r), + + 
JSC.ArrayBuffer => r.toJS(globalObject, null), + []u8 => JSC.ArrayBuffer.fromBytes(r, .ArrayBuffer).toJS(globalObject, null), + + else => switch (@typeInfo(ReturnType)) { + .int, .float, .comptime_int, .comptime_float => JSC.JSValue.jsNumber(r), + .@"struct", .@"enum", .@"opaque", .@"union" => r.toJS(globalObject), + .pointer => { + if (bun.trait.isZigString(ReturnType)) + JSC.ZigString.init(bun.asByteSlice(r)).withEncoding().toJS(globalObject); + + return r.toJS(globalObject); + }, + }, + }, + .err => |e| e.toJSC(globalObject), + }; + } + + pub fn toArrayBuffer(this: @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { + return switch (this) { + .result => |r| JSC.ArrayBuffer.fromBytes(r, .ArrayBuffer).toJS(globalObject, null), + .err => |e| e.toJSC(globalObject), + }; + } + + pub fn getErrno(this: @This()) posix.E { + return switch (this) { + .result => posix.E.SUCCESS, + .err => |e| @enumFromInt(e.errno), + }; + } + + pub fn errnoSys(rc: anytype, syscall: sys.Tag) ?@This() { + if (comptime Environment.isWindows) { + if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { + if (rc != 0) return null; + } + } + return switch (sys.getErrno(rc)) { + .SUCCESS => null, + else => |e| @This(){ + // always truncate + .err = .{ + .errno = translateToErrInt(e), + .syscall = syscall, + }, + }, + }; + } + + pub fn errno(err: anytype, syscall: sys.Tag) @This() { + return @This(){ + // always truncate + .err = .{ + .errno = translateToErrInt(err), + .syscall = syscall, + }, + }; + } + + pub fn errnoSysFd(rc: anytype, syscall: sys.Tag, fd: bun.FileDescriptor) ?@This() { + if (comptime Environment.isWindows) { + if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { + if (rc != 0) return null; + } + } + return switch (sys.getErrno(rc)) { + .SUCCESS => null, + else => |e| @This(){ + // Always truncate + .err = .{ + .errno = translateToErrInt(e), + .syscall = syscall, + .fd = fd, + }, + }, + }; + } + + pub fn errnoSysP(rc: anytype, syscall: sys.Tag, file_path: anytype) ?@This() { + if (bun.meta.Item(@TypeOf(file_path)) == u16) { + @compileError("Do not pass WString path to errnoSysP, it needs the path encoded as utf8"); + } + if (comptime Environment.isWindows) { + if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { + if (rc != 0) return null; + } + } + return switch (sys.getErrno(rc)) { + .SUCCESS => null, + else => |e| @This(){ + // Always truncate + .err = .{ + .errno = translateToErrInt(e), + .syscall = syscall, + .path = bun.asByteSlice(file_path), + }, + }, + }; + } + + pub fn errnoSysFP(rc: anytype, syscall: sys.Tag, fd: bun.FileDescriptor, file_path: anytype) ?@This() { + if (comptime Environment.isWindows) { + if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { + if (rc != 0) return null; + } + } + return switch (sys.getErrno(rc)) { + .SUCCESS => null, + else => |e| @This(){ + // Always truncate + .err = .{ + .errno = translateToErrInt(e), + .syscall = syscall, + .fd = fd, + .path = bun.asByteSlice(file_path), + }, + }, + }; + } + + pub fn errnoSysPD(rc: anytype, syscall: sys.Tag, file_path: anytype, dest: anytype) ?@This() { + if (bun.meta.Item(@TypeOf(file_path)) == u16) { + @compileError("Do not pass WString path to errnoSysPD, it needs the path encoded as utf8"); + } + if (comptime Environment.isWindows) { + if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { + if (rc != 0) return null; + } + } + return switch (sys.getErrno(rc)) { + .SUCCESS => null, + else => |e| @This(){ + // Always truncate + .err = .{ + .errno = translateToErrInt(e), + 
.syscall = syscall, + .path = bun.asByteSlice(file_path), + .dest = bun.asByteSlice(dest), + }, + }, + }; + } + }; +} + +fn translateToErrInt(err: anytype) bun.sys.Error.Int { + return switch (@TypeOf(err)) { + bun.windows.NTSTATUS => @intFromEnum(bun.windows.translateNTStatusToErrno(err)), + else => @truncate(@intFromEnum(err)), + }; +} + +const std = @import("std"); +const bun = @import("bun"); +const sys = bun.sys; +const Environment = bun.Environment; +const meta = bun.meta; +const windows = bun.windows; +const heap_allocator = bun.default_allocator; +const kernel32 = windows.kernel32; +const logger = bun.logger; +const posix = std.posix; +const path_handler = bun.path; +const strings = bun.strings; +const string = bun.string; +const L = strings.literal; +const Fs = @import("../fs.zig"); +const IdentityContext = @import("../../identity_context.zig").IdentityContext; +const JSC = bun.JSC; +const Mode = bun.Mode; +const Syscall = bun.sys; +const URL = @import("../../url.zig").URL; +const Value = std.json.Value; +const JSError = bun.JSError; diff --git a/src/bun.js/node/buffer.zig b/src/bun.js/node/buffer.zig index 4366569a9c..397242b404 100644 --- a/src/bun.js/node/buffer.zig +++ b/src/bun.js/node/buffer.zig @@ -1,7 +1,7 @@ const std = @import("std"); const bun = @import("bun"); const JSC = bun.JSC; -const Encoder = JSC.WebCore.Encoder; +const Encoder = JSC.WebCore.encoding; const Environment = bun.Environment; pub const BufferVectorized = struct { @@ -61,7 +61,7 @@ pub const BufferVectorized = struct { inline 4, 8, 16 => |n| if (comptime Environment.isMac) { const pattern = buf[0..n]; buf = buf[pattern.len..]; - @field(bun.C, std.fmt.comptimePrint("memset_pattern{d}", .{n}))(buf.ptr, pattern.ptr, buf.len); + @field(bun.c, std.fmt.comptimePrint("memset_pattern{d}", .{n}))(buf.ptr, pattern.ptr, buf.len); return true; }, else => {}, diff --git a/src/bun.js/node/dir_iterator.zig b/src/bun.js/node/dir_iterator.zig index 9be5cc5020..774f48a8c4 100644 --- a/src/bun.js/node/dir_iterator.zig +++ b/src/bun.js/node/dir_iterator.zig @@ -11,7 +11,7 @@ const posix = std.posix; const Dir = std.fs.Dir; const JSC = bun.JSC; -const PathString = JSC.PathString; +const PathString = bun.PathString; const bun = @import("bun"); const IteratorError = error{ AccessDenied, SystemResources } || posix.UnexpectedError; @@ -270,7 +270,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { bun.sys.syslog("NtQueryDirectoryFile({}) = {s}", .{ bun.FD.fromStdDir(self.dir), @tagName(rc) }); return .{ .err = .{ - .errno = @intFromEnum(bun.C.SystemErrno.ENOTDIR), + .errno = @intFromEnum(bun.sys.SystemErrno.ENOTDIR), .syscall = .NtQueryDirectoryFile, }, }; @@ -296,7 +296,7 @@ pub fn NewIterator(comptime use_windows_ospath: bool) type { return .{ .err = .{ - .errno = @intFromEnum(bun.C.SystemErrno.EUNKNOWN), + .errno = @intFromEnum(bun.sys.SystemErrno.EUNKNOWN), .syscall = .NtQueryDirectoryFile, }, }; diff --git a/src/bun.js/node/fs_events.zig b/src/bun.js/node/fs_events.zig index 04da57b35e..1f43869944 100644 --- a/src/bun.js/node/fs_events.zig +++ b/src/bun.js/node/fs_events.zig @@ -9,7 +9,7 @@ const string = bun.string; const PathWatcher = @import("./path_watcher.zig").PathWatcher; const EventType = PathWatcher.EventType; -const Event = bun.JSC.Node.FSWatcher.Event; +const Event = bun.JSC.Node.fs.Watcher.Event; pub const CFAbsoluteTime = f64; pub const CFTimeInterval = f64; @@ -183,7 +183,7 @@ var fsevents_cf: ?CoreFoundation = null; var fsevents_cs: ?CoreServices = null; fn InitLibrary() void { - const 
fsevents_cf_handle = bun.C.dlopen("/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation", .{ .LAZY = true, .LOCAL = true }); + const fsevents_cf_handle = bun.sys.dlopen("/System/Library/Frameworks/CoreFoundation.framework/Versions/A/CoreFoundation", .{ .LAZY = true, .LOCAL = true }); if (fsevents_cf_handle == null) @panic("Cannot Load CoreFoundation"); fsevents_cf = CoreFoundation{ @@ -202,7 +202,7 @@ fn InitLibrary() void { .RunLoopDefaultMode = dlsym(fsevents_cf_handle, *CFStringRef, "kCFRunLoopDefaultMode") orelse @panic("Cannot Load CoreFoundation"), }; - const fsevents_cs_handle = bun.C.dlopen("/System/Library/Frameworks/CoreServices.framework/Versions/A/CoreServices", .{ .LAZY = true, .LOCAL = true }); + const fsevents_cs_handle = bun.sys.dlopen("/System/Library/Frameworks/CoreServices.framework/Versions/A/CoreServices", .{ .LAZY = true, .LOCAL = true }); if (fsevents_cs_handle == null) @panic("Cannot Load CoreServices"); fsevents_cs = CoreServices{ diff --git a/src/bun.js/node/node_cluster_binding.zig b/src/bun.js/node/node_cluster_binding.zig index 4885278894..ae6155b810 100644 --- a/src/bun.js/node/node_cluster_binding.zig +++ b/src/bun.js/node/node_cluster_binding.zig @@ -71,7 +71,7 @@ pub fn sendHelperChild(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFram const ex = globalThis.createTypeErrorInstance("sendInternal() failed", .{}); ex.put(globalThis, ZigString.static("syscall"), bun.String.static("write").toJS(globalThis)); const fnvalue = JSC.JSFunction.create(globalThis, "", S.impl, 1, .{}); - Bun__Process__queueNextTick1(globalThis, fnvalue, ex); + fnvalue.callNextTick(globalThis, .{ex}); return .false; } diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index 59968e8052..83f5ba724e 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -1,49 +1,10 @@ // This file contains the underlying implementation for sync & async functions // for interacting with the filesystem from JavaScript. 
// The top-level functions assume the arguments are already validated -const std = @import("std"); -const bun = @import("bun"); -const strings = bun.strings; -const windows = bun.windows; -const c = bun.c; -const string = bun.string; -const JSC = bun.JSC; -const PathString = JSC.PathString; -const Environment = bun.Environment; -const C = bun.C; -const system = std.posix.system; -const Maybe = JSC.Maybe; -const Encoding = JSC.Node.Encoding; -const PosixToWinNormalizer = bun.path.PosixToWinNormalizer; - -const FileDescriptor = bun.FileDescriptor; -const FD = bun.FD; - -const AbortSignal = JSC.AbortSignal; - -const Syscall = if (Environment.isWindows) bun.sys.sys_uv else bun.sys; - -const Constants = @import("./node_fs_constant.zig").Constants; -const builtin = @import("builtin"); -const posix = std.posix; -const darwin = std.os.darwin; -const linux = std.os.linux; -const PathLike = JSC.Node.PathLike; -const PathOrFileDescriptor = JSC.Node.PathOrFileDescriptor; -const DirIterator = @import("./dir_iterator.zig"); -const Path = @import("../../resolver/resolve_path.zig"); -const FileSystem = @import("../../fs.zig").FileSystem; -const ArgumentsSlice = JSC.Node.ArgumentsSlice; -const TimeLike = JSC.Node.TimeLike; -const Mode = bun.Mode; -const uv = bun.windows.libuv; -const E = C.E; -const uid_t = JSC.Node.uid_t; -const gid_t = JSC.Node.gid_t; -const ReadPosition = i64; -const StringOrBuffer = JSC.Node.StringOrBuffer; -const NodeFSFunctionEnum = std.meta.DeclEnum(JSC.Node.NodeFS); -const UvFsCallback = fn (*uv.fs_t) callconv(.C) void; +pub const constants = @import("node_fs_constant.zig"); +pub const Binding = @import("node_fs_binding.zig").Binding; +pub const Watcher = @import("node_fs_watcher.zig").FSWatcher; +pub const StatWatcher = @import("node_fs_stat_watcher.zig").StatWatcher; pub const default_permission = if (Environment.isPosix) Syscall.S.IRUSR | @@ -62,7 +23,7 @@ else pub const Flavor = enum { sync, @"async" }; const ArrayBuffer = JSC.MarkedArrayBuffer; -const Buffer = JSC.Buffer; +const Buffer = bun.api.node.Buffer; const FileSystemFlags = JSC.Node.FileSystemFlags; pub const Async = struct { pub const access = NewAsyncFSTask(Return.Access, Arguments.Access, NodeFS.access); @@ -177,19 +138,19 @@ pub const Async = struct { req: uv.fs_t = std.mem.zeroes(uv.fs_t), result: JSC.Maybe(ReturnType), ref: bun.Async.KeepAlive = .{}, - tracker: JSC.AsyncTaskTracker, + tracker: JSC.Debugger.AsyncTaskTracker, pub const Task = @This(); pub const heap_label = "Async" ++ bun.meta.typeBaseName(@typeName(ArgumentType)) ++ "UvTask"; - pub fn create(globalObject: *JSC.JSGlobalObject, this: *JSC.Node.NodeJSFS, task_args: ArgumentType, vm: *JSC.VirtualMachine) JSC.JSValue { + pub fn create(globalObject: *JSC.JSGlobalObject, this: *JSC.Node.fs.Binding, task_args: ArgumentType, vm: *JSC.VirtualMachine) JSC.JSValue { var task = bun.new(Task, .{ .promise = JSC.JSPromise.Strong.init(globalObject), .args = task_args, .result = undefined, .globalObject = globalObject, - .tracker = JSC.AsyncTaskTracker.init(vm), + .tracker = JSC.Debugger.AsyncTaskTracker.init(vm), }); task.ref.ref(vm); task.args.toThreadSafe(); @@ -387,7 +348,7 @@ pub const Async = struct { task: JSC.WorkPoolTask = .{ .callback = &workPoolCallback }, result: JSC.Maybe(ReturnType), ref: bun.Async.KeepAlive = .{}, - tracker: JSC.AsyncTaskTracker, + tracker: JSC.Debugger.AsyncTaskTracker, /// NewAsyncFSTask supports cancelable operations via AbortSignal, /// so long as a "signal" field exists. 
The task wrapper will ensure @@ -399,7 +360,7 @@ pub const Async = struct { pub fn create( globalObject: *JSC.JSGlobalObject, - _: *JSC.Node.NodeJSFS, + _: *bun.api.node.fs.Binding, args: ArgumentType, vm: *JSC.VirtualMachine, ) JSC.JSValue { @@ -408,7 +369,7 @@ pub const Async = struct { .args = args, .result = undefined, .globalObject = globalObject, - .tracker = JSC.AsyncTaskTracker.init(vm), + .tracker = JSC.Debugger.AsyncTaskTracker.init(vm), }); task.ref.ref(vm); task.args.toThreadSafe(); @@ -505,7 +466,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { /// alive by the shell instance ref: if (!is_shell) bun.Async.KeepAlive else struct {} = .{}, arena: bun.ArenaAllocator, - tracker: JSC.AsyncTaskTracker, + tracker: JSC.Debugger.AsyncTaskTracker, has_result: std.atomic.Value(bool), /// On each creation of a `AsyncCpSingleFileTask`, this is incremented. /// When each task is finished, decrement. @@ -551,7 +512,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { const result = node_fs._copySingleFileSync( this.src, this.dest, - @enumFromInt((if (args.flags.errorOnExist or !args.flags.force) Constants.COPYFILE_EXCL else @as(u8, 0))), + @enumFromInt((if (args.flags.errorOnExist or !args.flags.force) constants.COPYFILE_EXCL else @as(u8, 0))), null, this.cp_task.args, ); @@ -602,7 +563,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { pub fn create( globalObject: *JSC.JSGlobalObject, - _: *JSC.Node.NodeJSFS, + _: *JSC.Node.fs.Binding, cp_args: Arguments.Cp, vm: *JSC.VirtualMachine, arena: bun.ArenaAllocator, @@ -627,7 +588,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { .has_result = .{ .raw = false }, .result = undefined, .evtloop = .{ .js = vm.event_loop }, - .tracker = JSC.AsyncTaskTracker.init(vm), + .tracker = JSC.Debugger.AsyncTaskTracker.init(vm), .arena = arena, .subtask_count = .{ .raw = 1 }, .shelltask = shelltask, @@ -656,7 +617,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { .has_result = .{ .raw = false }, .result = undefined, .evtloop = .{ .mini = mini }, - .tracker = JSC.AsyncTaskTracker{ .id = 0 }, + .tracker = JSC.Debugger.AsyncTaskTracker{ .id = 0 }, .arena = arena, .subtask_count = .{ .raw = 1 }, .shelltask = shelltask, @@ -765,7 +726,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { const attributes = c.GetFileAttributesW(src); if (attributes == c.INVALID_FILE_ATTRIBUTES) { this.finishConcurrently(.{ .err = .{ - .errno = @intFromEnum(C.SystemErrno.ENOENT), + .errno = @intFromEnum(SystemErrno.ENOENT), .syscall = .copyfile, .path = nodefs.osPathIntoSyncErrorBuf(src), } }); @@ -778,9 +739,9 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { dest, if (comptime is_shell) // Shell always forces copy - @enumFromInt(Constants.Copyfile.force) + @enumFromInt(constants.Copyfile.force) else - @enumFromInt((if (args.flags.errorOnExist or !args.flags.force) Constants.COPYFILE_EXCL else @as(u8, 0))), + @enumFromInt((if (args.flags.errorOnExist or !args.flags.force) constants.COPYFILE_EXCL else @as(u8, 0))), attributes, this.args, ); @@ -807,7 +768,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { const r = nodefs._copySingleFileSync( src, dest, - @enumFromInt((if (args.flags.errorOnExist or !args.flags.force) Constants.COPYFILE_EXCL else @as(u8, 0))), + @enumFromInt((if (args.flags.errorOnExist or !args.flags.force) constants.COPYFILE_EXCL else @as(u8, 0))), stat_, this.args, ); @@ -851,7 +812,7 @@ pub fn NewAsyncCpTask(comptime is_shell: bool) type { const dest = dest_buf[0..dest_dir_len :0]; if (comptime 
Environment.isMac) { - if (Maybe(Return.Cp).errnoSysP(C.clonefile(src, dest, 0), .clonefile, src)) |err| { + if (Maybe(Return.Cp).errnoSysP(c.clonefile(src, dest, 0), .clonefile, src)) |err| { switch (err.getErrno()) { .ACCES, .NAMETOOLONG, @@ -980,7 +941,7 @@ pub const AsyncReaddirRecursiveTask = struct { globalObject: *JSC.JSGlobalObject, task: JSC.WorkPoolTask = .{ .callback = &workPoolCallback }, ref: bun.Async.KeepAlive = .{}, - tracker: JSC.AsyncTaskTracker, + tracker: JSC.Debugger.AsyncTaskTracker, // It's not 100% clear this one is necessary has_result: std.atomic.Value(bool), @@ -1087,7 +1048,7 @@ pub const AsyncReaddirRecursiveTask = struct { .args = args, .has_result = .{ .raw = false }, .globalObject = globalObject, - .tracker = JSC.AsyncTaskTracker.init(vm), + .tracker = JSC.Debugger.AsyncTaskTracker.init(vm), .subtask_count = .{ .raw = 1 }, .root_path = PathString.init(bun.default_allocator.dupeZ(u8, args.path.slice()) catch bun.outOfMemory()), .result_list = switch (args.tag()) { @@ -3034,7 +2995,7 @@ pub const Arguments = struct { pub const CopyFile = struct { src: PathLike, dest: PathLike, - mode: Constants.Copyfile, + mode: constants.Copyfile, pub fn deinit(this: *const CopyFile) void { this.src.deinit(); @@ -3062,7 +3023,7 @@ pub const Arguments = struct { }; errdefer dest.deinit(); - var mode: Constants.Copyfile = @enumFromInt(0); + var mode: constants.Copyfile = @enumFromInt(0); if (arguments.next()) |arg| { arguments.eat(); mode = @enumFromInt(@intFromEnum(try FileSystemFlags.fromJSNumberOnly(ctx, arg, .copy_file))); @@ -3082,7 +3043,7 @@ pub const Arguments = struct { flags: Flags, const Flags = struct { - mode: Constants.Copyfile, + mode: constants.Copyfile, recursive: bool, errorOnExist: bool, force: bool, @@ -3161,9 +3122,9 @@ pub const Arguments = struct { pub const UnwatchFile = void; - pub const Watch = JSC.Node.FSWatcher.Arguments; + pub const Watch = Watcher.Arguments; - pub const WatchFile = JSC.Node.StatWatcher.Arguments; + pub const WatchFile = StatWatcher.Arguments; pub const Fsync = struct { fd: FileDescriptor, @@ -3580,7 +3541,7 @@ pub const NodeFS = struct { if (args.mode.isForceClone()) { // https://www.manpagez.com/man/2/clonefile/ - return ret.errnoSysP(C.clonefile(src, dest, 0), .copyfile, src) orelse ret.success; + return ret.errnoSysP(c.clonefile(src, dest, 0), .copyfile, src) orelse ret.success; } else { const stat_ = switch (Syscall.stat(src)) { .result => |result| result, @@ -3589,7 +3550,7 @@ pub const NodeFS = struct { if (!posix.S.ISREG(stat_.mode)) { return Maybe(Return.CopyFile){ .err = .{ - .errno = @intFromEnum(C.SystemErrno.ENOTSUP), + .errno = @intFromEnum(SystemErrno.ENOTSUP), .syscall = .copyfile, } }; } @@ -3602,7 +3563,7 @@ pub const NodeFS = struct { _ = Syscall.unlink(dest); } - if (ret.errnoSysP(C.clonefile(src, dest, 0), .copyfile, src) == null) { + if (ret.errnoSysP(c.clonefile(src, dest, 0), .copyfile, src) == null) { _ = Syscall.chmod(dest, stat_.mode); return ret.success; } @@ -3621,7 +3582,7 @@ pub const NodeFS = struct { flags |= bun.O.EXCL; } - const dest_fd = switch (Syscall.open(dest, flags, JSC.Node.default_permission)) { + const dest_fd = switch (Syscall.open(dest, flags, JSC.Node.fs.default_permission)) { .result => |result| result, .err => |err| return Maybe(Return.CopyFile){ .err = err.withPath(args.dest.slice()) }, }; @@ -3638,12 +3599,12 @@ pub const NodeFS = struct { // we fallback to copyfile() when the file is > 128 KB and clonefile fails // clonefile() isn't supported on all devices // nor is it supported 
across devices - var mode: u32 = C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA; + var mode: u32 = c.COPYFILE_ACL | c.COPYFILE_DATA; if (args.mode.shouldntOverwrite()) { - mode |= C.darwin.COPYFILE_EXCL; + mode |= c.COPYFILE_EXCL; } - return ret.errnoSysP(C.copyfile(src, dest, null, mode), .copyfile, src) orelse ret.success; + return ret.errnoSysP(c.copyfile(src, dest, null, mode), .copyfile, src) orelse ret.success; } if (comptime Environment.isLinux) { @@ -3666,7 +3627,7 @@ pub const NodeFS = struct { }; if (!posix.S.ISREG(stat_.mode)) { - return Maybe(Return.CopyFile){ .err = .{ .errno = @intFromEnum(C.SystemErrno.ENOTSUP), .syscall = .copyfile } }; + return Maybe(Return.CopyFile){ .err = .{ .errno = @intFromEnum(SystemErrno.ENOTSUP), .syscall = .copyfile } }; } var flags: i32 = bun.O.CREAT | bun.O.WRONLY; @@ -3675,7 +3636,7 @@ pub const NodeFS = struct { flags |= bun.O.EXCL; } - const dest_fd = switch (Syscall.open(dest, flags, JSC.Node.default_permission)) { + const dest_fd = switch (Syscall.open(dest, flags, JSC.Node.fs.default_permission)) { .result => |result| result, .err => |err| return Maybe(Return.CopyFile){ .err = err }, }; @@ -3684,7 +3645,7 @@ pub const NodeFS = struct { // https://manpages.debian.org/testing/manpages-dev/ioctl_ficlone.2.en.html if (args.mode.isForceClone()) { - if (ret.errnoSysP(bun.C.linux.ioctl_ficlone(dest_fd, src_fd), .ioctl_ficlone, dest)) |err| { + if (ret.errnoSysP(bun.linux.ioctl_ficlone(dest_fd, src_fd), .ioctl_ficlone, dest)) |err| { dest_fd.close(); // This is racey, but it's the best we can do _ = bun.sys.unlink(dest); @@ -3697,7 +3658,7 @@ pub const NodeFS = struct { // If we know it's a regular file and ioctl_ficlone is available, attempt to use it. if (posix.S.ISREG(stat_.mode) and bun.can_use_ioctl_ficlone()) { - const rc = bun.C.linux.ioctl_ficlone(dest_fd, src_fd); + const rc = bun.linux.ioctl_ficlone(dest_fd, src_fd); if (rc == 0) { _ = Syscall.fchmod(dest_fd, stat_.mode); dest_fd.close(); @@ -3897,7 +3858,7 @@ pub const NodeFS = struct { } const path = args.path.sliceZ(&this.sync_error_buf); - return Maybe(Return.Lchmod).errnoSysP(C.lchmod(path, args.mode), .lchmod, path) orelse + return Maybe(Return.Lchmod).errnoSysP(c.lchmod(path, @truncate(args.mode)), .lchmod, path) orelse Maybe(Return.Lchmod).success; } @@ -3908,7 +3869,7 @@ pub const NodeFS = struct { const path = args.path.sliceZ(&this.sync_error_buf); - return Maybe(Return.Lchown).errnoSysP(C.lchown(path, args.uid, args.gid), .lchown, path) orelse + return Maybe(Return.Lchown).errnoSysP(c.lchown(path, args.uid, args.gid), .lchown, path) orelse Maybe(Return.Lchown).success; } @@ -3942,7 +3903,7 @@ pub const NodeFS = struct { pub fn mkdir(this: *NodeFS, args: Arguments.Mkdir, _: Flavor) Maybe(Return.Mkdir) { if (args.path.slice().len == 0) return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.NOENT), + .errno = @intFromEnum(bun.sys.E.NOENT), .syscall = .mkdir, .path = "", } }; @@ -4172,14 +4133,14 @@ pub const NodeFS = struct { }; } - const rc = C.mkdtemp(prefix_buf); + const rc = c.mkdtemp(prefix_buf); if (rc) |ptr| { return .{ .result = JSC.ZigString.dupeForJS(bun.sliceTo(ptr, 0), bun.default_allocator) catch bun.outOfMemory(), }; } - // bun.C.getErrno(rc) returns SUCCESS if rc is -1 so we call std.c._errno() directly + // c.getErrno(rc) returns SUCCESS if rc is -1 so we call std.c._errno() directly const errno = @as(std.c.E, @enumFromInt(std.c._errno().*)); return .{ .err = Syscall.Error{ @@ -4506,7 +4467,7 @@ pub const NodeFS = struct { }) |current| : (entry = iterator.next()) { 
if (ExpectedType == JSC.Node.Dirent) { if (dirent_path.isEmpty()) { - dirent_path = JSC.WebCore.Encoder.toBunString(strings.withoutNTPrefix(std.meta.Child(@TypeOf(basename)), basename), args.encoding); + dirent_path = JSC.WebCore.encoding.toBunString(strings.withoutNTPrefix(std.meta.Child(@TypeOf(basename)), basename), args.encoding); } } if (comptime !is_u16) { @@ -4515,7 +4476,7 @@ pub const NodeFS = struct { JSC.Node.Dirent => { dirent_path.ref(); entries.append(.{ - .name = JSC.WebCore.Encoder.toBunString(utf8_name, args.encoding), + .name = JSC.WebCore.encoding.toBunString(utf8_name, args.encoding), .path = dirent_path, .kind = current.kind, }) catch bun.outOfMemory(); @@ -4524,7 +4485,7 @@ pub const NodeFS = struct { entries.append(Buffer.fromString(utf8_name, bun.default_allocator) catch bun.outOfMemory()) catch bun.outOfMemory(); }, bun.String => { - entries.append(JSC.WebCore.Encoder.toBunString(utf8_name, args.encoding)) catch bun.outOfMemory(); + entries.append(JSC.WebCore.encoding.toBunString(utf8_name, args.encoding)) catch bun.outOfMemory(); }, else => @compileError("unreachable"), } @@ -4546,7 +4507,7 @@ pub const NodeFS = struct { .utf8 => entries.append(bun.String.createUTF16(utf16_name)) catch bun.outOfMemory(), else => |enc| { const utf8_path = bun.strings.fromWPath(re_encoding_buffer.?, utf16_name); - entries.append(JSC.WebCore.Encoder.toBunString(utf8_path, enc)) catch bun.outOfMemory(); + entries.append(JSC.WebCore.encoding.toBunString(utf8_path, enc)) catch bun.outOfMemory(); }, }, else => @compileError("unreachable"), @@ -4810,11 +4771,11 @@ pub const NodeFS = struct { const path_u8 = bun.path.dirname(bun.path.join(&[_]string{ root_basename, name_to_copy }, .auto), .auto); if (dirent_path_prev.isEmpty() or !bun.strings.eql(dirent_path_prev.byteSlice(), path_u8)) { dirent_path_prev.deref(); - dirent_path_prev = JSC.WebCore.Encoder.toBunString(strings.withoutNTPrefix(std.meta.Child(@TypeOf(path_u8)), path_u8), args.encoding); + dirent_path_prev = JSC.WebCore.encoding.toBunString(strings.withoutNTPrefix(std.meta.Child(@TypeOf(path_u8)), path_u8), args.encoding); } dirent_path_prev.ref(); entries.append(.{ - .name = JSC.WebCore.Encoder.toBunString(utf8_name, args.encoding), + .name = JSC.WebCore.encoding.toBunString(utf8_name, args.encoding), .path = dirent_path_prev, .kind = current.kind, }) catch bun.outOfMemory(); @@ -4823,7 +4784,7 @@ pub const NodeFS = struct { entries.append(Buffer.fromString(strings.withoutNTPrefix(std.meta.Child(@TypeOf(name_to_copy)), name_to_copy), bun.default_allocator) catch bun.outOfMemory()) catch bun.outOfMemory(); }, bun.String => { - entries.append(JSC.WebCore.Encoder.toBunString(strings.withoutNTPrefix(std.meta.Child(@TypeOf(name_to_copy)), name_to_copy), args.encoding)) catch bun.outOfMemory(); + entries.append(JSC.WebCore.encoding.toBunString(strings.withoutNTPrefix(std.meta.Child(@TypeOf(name_to_copy)), name_to_copy), args.encoding)) catch bun.outOfMemory(); }, else => @compileError(unreachable), } @@ -4846,7 +4807,7 @@ pub const NodeFS = struct { if ( // Typed arrays in JavaScript are limited to 4.7 GB. - adjusted_size > JSC.synthetic_allocation_limit or + adjusted_size > JSC.VirtualMachine.synthetic_allocation_limit or // If they do not have enough memory to open the file and they're on Linux, let's throw an error instead of dealing with the OOM killer. 
(Environment.isLinux and size >= bun.getTotalMemorySize())) { @@ -5108,7 +5069,7 @@ pub const NodeFS = struct { if (comptime string_type == .default) { return .{ .result = .{ - .transcoded_string = JSC.WebCore.Encoder.toBunString(temporary_read_buffer, args.encoding), + .transcoded_string = JSC.WebCore.encoding.toBunString(temporary_read_buffer, args.encoding), }, }; } else { @@ -5328,33 +5289,29 @@ pub const NodeFS = struct { var written: usize = 0; // Attempt to pre-allocate large files - if (Environment.isLinux) { - preallocate: { - // Worthwhile after 6 MB at least on ext4 linux - if (buf.len >= bun.C.preallocate_length) { - const offset: usize = if (args.file == .path) - // on mac, it's relatively positioned - 0 - else brk: { - // on linux, it's absolutely positione + // Worthwhile after 6 MB at least on ext4 linux + if (bun.sys.preallocate_supported and buf.len >= bun.sys.preallocate_length) preallocate: { + const offset: usize = if (args.file == .path) + // on mac, it's relatively positioned + 0 + else brk: { + // on linux, it's absolutely positione - switch (Syscall.lseek( - fd, - @as(std.posix.off_t, @intCast(0)), - std.os.linux.SEEK.CUR, - )) { - .err => break :preallocate, - .result => |pos| break :brk @as(usize, @intCast(pos)), - } - }; - - bun.C.preallocate_file( - fd.cast(), - @as(std.posix.off_t, @intCast(offset)), - @as(std.posix.off_t, @intCast(buf.len)), - ) catch {}; + switch (Syscall.lseek( + fd, + @as(std.posix.off_t, @intCast(0)), + std.os.linux.SEEK.CUR, + )) { + .err => break :preallocate, + .result => |pos| break :brk @as(usize, @intCast(pos)), } - } + }; + + bun.sys.preallocate_file( + fd.cast(), + @as(std.posix.off_t, @intCast(offset)), + @as(std.posix.off_t, @intCast(buf.len)), + ) catch {}; } while (buf.len > 0) { @@ -5497,7 +5454,7 @@ pub const NodeFS = struct { }; }, else => |enc| .{ - .string = .{ .utf8 = .{}, .underlying = JSC.WebCore.Encoder.toBunString(buf, enc) }, + .string = .{ .utf8 = .{}, .underlying = JSC.WebCore.encoding.toBunString(buf, enc) }, }, }, }; @@ -5549,7 +5506,7 @@ pub const NodeFS = struct { }; }, else => |enc| .{ - .string = .{ .utf8 = .{}, .underlying = JSC.WebCore.Encoder.toBunString(buf, enc) }, + .string = .{ .utf8 = .{}, .underlying = JSC.WebCore.encoding.toBunString(buf, enc) }, }, }, }; @@ -5572,7 +5529,7 @@ pub const NodeFS = struct { pub fn rmdir(this: *NodeFS, args: Arguments.RmDir, _: Flavor) Maybe(Return.Rmdir) { if (args.recursive) { zigDeleteTree(std.fs.cwd(), args.path.slice(), .directory) catch |err| { - var errno: bun.C.E = switch (@as(anyerror, err)) { + var errno: bun.sys.E = switch (@as(anyerror, err)) { error.AccessDenied => .PERM, error.FileTooBig => .FBIG, error.SymLinkLoop => .LOOP, @@ -5874,7 +5831,7 @@ pub const NodeFS = struct { }; } - return Maybe(Return.Truncate).errnoSysP(C.truncate(path.sliceZ(&this.sync_error_buf), len), .truncate, path.slice()) orelse + return Maybe(Return.Truncate).errnoSysP(c.truncate(path.sliceZ(&this.sync_error_buf), len), .truncate, path.slice()) orelse Maybe(Return.Truncate).success; } @@ -6040,20 +5997,20 @@ pub const NodeFS = struct { const dest = dest_buf[0..dest_dir_len :0]; if (Environment.isWindows) { - const attributes = bun.c.GetFileAttributesW(src); - if (attributes == bun.c.INVALID_FILE_ATTRIBUTES) { + const attributes = c.GetFileAttributesW(src); + if (attributes == c.INVALID_FILE_ATTRIBUTES) { return .{ .err = .{ - .errno = @intFromEnum(C.SystemErrno.ENOENT), + .errno = @intFromEnum(SystemErrno.ENOENT), .syscall = .copyfile, .path = this.osPathIntoSyncErrorBuf(src), } 
}; } - if ((attributes & bun.c.FILE_ATTRIBUTE_DIRECTORY) == 0) { + if ((attributes & c.FILE_ATTRIBUTE_DIRECTORY) == 0) { const r = this._copySingleFileSync( src, dest, - @enumFromInt(if (cp_flags.errorOnExist or !cp_flags.force) Constants.COPYFILE_EXCL else @as(u8, 0)), + @enumFromInt(if (cp_flags.errorOnExist or !cp_flags.force) constants.COPYFILE_EXCL else @as(u8, 0)), attributes, args, ); @@ -6075,7 +6032,7 @@ pub const NodeFS = struct { const r = this._copySingleFileSync( src, dest, - @enumFromInt((if (cp_flags.errorOnExist or !cp_flags.force) Constants.COPYFILE_EXCL else @as(u8, 0))), + @enumFromInt((if (cp_flags.errorOnExist or !cp_flags.force) constants.COPYFILE_EXCL else @as(u8, 0))), stat_, args, ); @@ -6095,7 +6052,7 @@ pub const NodeFS = struct { } if (comptime Environment.isMac) try_with_clonefile: { - if (Maybe(Return.Cp).errnoSysP(C.clonefile(src, dest, 0), .clonefile, src)) |err| { + if (Maybe(Return.Cp).errnoSysP(c.clonefile(src, dest, 0), .clonefile, src)) |err| { switch (err.getErrno()) { .NAMETOOLONG, .ROFS, .INVAL, .ACCES, .PERM => |errno| { if (errno == .ACCES or errno == .PERM) { @@ -6174,7 +6131,7 @@ pub const NodeFS = struct { const r = this._copySingleFileSync( src_buf[0 .. src_dir_len + 1 + name_slice.len :0], dest_buf[0 .. dest_dir_len + 1 + name_slice.len :0], - @enumFromInt((if (cp_flags.errorOnExist or !cp_flags.force) Constants.COPYFILE_EXCL else @as(u8, 0))), + @enumFromInt((if (cp_flags.errorOnExist or !cp_flags.force) constants.COPYFILE_EXCL else @as(u8, 0))), null, args, ); @@ -6232,7 +6189,7 @@ pub const NodeFS = struct { this: *NodeFS, src: bun.OSPathSliceZ, dest: bun.OSPathSliceZ, - mode: Constants.Copyfile, + mode: constants.Copyfile, /// Stat on posix, file attributes on windows reuse_stat: ?if (Environment.isWindows) windows.DWORD else std.posix.Stat, args: Arguments.Cp, @@ -6243,7 +6200,7 @@ pub const NodeFS = struct { if (Environment.isMac) { if (mode.isForceClone()) { // https://www.manpagez.com/man/2/clonefile/ - return ret.errnoSysP(C.clonefile(src, dest, 0), .clonefile, src) orelse ret.success; + return ret.errnoSysP(c.clonefile(src, dest, 0), .clonefile, src) orelse ret.success; } else { const stat_ = reuse_stat orelse switch (Syscall.lstat(src)) { .result => |result| result, @@ -6255,16 +6212,16 @@ pub const NodeFS = struct { if (!posix.S.ISREG(stat_.mode)) { if (posix.S.ISLNK(stat_.mode)) { - var mode_: u32 = C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA | C.darwin.COPYFILE_NOFOLLOW_SRC; + var mode_: u32 = c.COPYFILE_ACL | c.COPYFILE_DATA | c.COPYFILE_NOFOLLOW_SRC; if (mode.shouldntOverwrite()) { - mode_ |= C.darwin.COPYFILE_EXCL; + mode_ |= c.COPYFILE_EXCL; } - return ret.errnoSysP(C.copyfile(src, dest, null, mode_), .copyfile, src) orelse ret.success; + return ret.errnoSysP(c.copyfile(src, dest, null, mode_), .copyfile, src) orelse ret.success; } @memcpy(this.sync_error_buf[0..src.len], src); return Maybe(Return.CopyFile){ .err = .{ - .errno = @intFromEnum(C.SystemErrno.ENOTSUP), + .errno = @intFromEnum(SystemErrno.ENOTSUP), .path = this.sync_error_buf[0..src.len], .syscall = .copyfile, } }; @@ -6278,7 +6235,7 @@ pub const NodeFS = struct { _ = Syscall.unlink(dest); } - if (ret.errnoSysP(C.clonefile(src, dest, 0), .clonefile, src) == null) { + if (ret.errnoSysP(c.clonefile(src, dest, 0), .clonefile, src) == null) { _ = Syscall.chmod(dest, stat_.mode); return ret.success; } @@ -6301,7 +6258,7 @@ pub const NodeFS = struct { } const dest_fd = dest_fd: { - switch (Syscall.open(dest, flags, JSC.Node.default_permission)) { + switch 
(Syscall.open(dest, flags, JSC.Node.fs.default_permission)) { .result => |result| break :dest_fd result, .err => |err| { if (err.getErrno() == .NOENT) { @@ -6318,7 +6275,7 @@ pub const NodeFS = struct { return Maybe(Return.CopyFile){ .err = mkdirResult.err }; } - switch (Syscall.open(dest, flags, JSC.Node.default_permission)) { + switch (Syscall.open(dest, flags, JSC.Node.fs.default_permission)) { .result => |result| break :dest_fd result, .err => {}, } @@ -6342,15 +6299,15 @@ pub const NodeFS = struct { // we fallback to copyfile() when the file is > 128 KB and clonefile fails // clonefile() isn't supported on all devices // nor is it supported across devices - var mode_: u32 = C.darwin.COPYFILE_ACL | C.darwin.COPYFILE_DATA | C.darwin.COPYFILE_NOFOLLOW_SRC; + var mode_: u32 = c.COPYFILE_ACL | c.COPYFILE_DATA | c.COPYFILE_NOFOLLOW_SRC; if (mode.shouldntOverwrite()) { - mode_ |= C.darwin.COPYFILE_EXCL; + mode_ |= c.COPYFILE_EXCL; } - const first_try = ret.errnoSysP(C.copyfile(src, dest, null, mode_), .copyfile, src) orelse return ret.success; - if (first_try == .err and first_try.err.errno == @intFromEnum(C.E.NOENT)) { + const first_try = ret.errnoSysP(c.copyfile(src, dest, null, mode_), .copyfile, src) orelse return ret.success; + if (first_try == .err and first_try.err.errno == @intFromEnum(Syscall.E.NOENT)) { bun.makePath(std.fs.cwd(), bun.path.dirname(dest, .auto)) catch {}; - return ret.errnoSysP(C.copyfile(src, dest, null, mode_), .copyfile, src) orelse ret.success; + return ret.errnoSysP(c.copyfile(src, dest, null, mode_), .copyfile, src) orelse ret.success; } return first_try; } @@ -6384,7 +6341,7 @@ pub const NodeFS = struct { if (!posix.S.ISREG(stat_.mode)) { return Maybe(Return.CopyFile){ .err = .{ - .errno = @intFromEnum(C.SystemErrno.ENOTSUP), + .errno = @intFromEnum(SystemErrno.ENOTSUP), .syscall = .copyfile, } }; } @@ -6396,7 +6353,7 @@ pub const NodeFS = struct { } const dest_fd = dest_fd: { - switch (Syscall.open(dest, flags, JSC.Node.default_permission)) { + switch (Syscall.open(dest, flags, JSC.Node.fs.default_permission)) { .result => |result| break :dest_fd result, .err => |err| { if (err.getErrno() == .NOENT) { @@ -6413,7 +6370,7 @@ pub const NodeFS = struct { return Maybe(Return.CopyFile){ .err = mkdirResult.err }; } - switch (Syscall.open(dest, flags, JSC.Node.default_permission)) { + switch (Syscall.open(dest, flags, JSC.Node.fs.default_permission)) { .result => |result| break :dest_fd result, .err => {}, } @@ -6428,7 +6385,7 @@ pub const NodeFS = struct { var size: usize = @intCast(@max(stat_.size, 0)); if (posix.S.ISREG(stat_.mode) and bun.can_use_ioctl_ficlone()) { - const rc = bun.C.linux.ioctl_ficlone(dest_fd, src_fd); + const rc = bun.linux.ioctl_ficlone(dest_fd, src_fd); if (rc == 0) { _ = Syscall.fchmod(dest_fd, stat_.mode); dest_fd.close(); @@ -6501,12 +6458,12 @@ pub const NodeFS = struct { if (Environment.isWindows) { const src_enoent_maybe = ret.initErrWithP(.ENOENT, .copyfile, this.osPathIntoSyncErrorBuf(src)); const dst_enoent_maybe = ret.initErrWithP(.ENOENT, .copyfile, this.osPathIntoSyncErrorBuf(dest)); - const stat_ = reuse_stat orelse switch (bun.c.GetFileAttributesW(src)) { - bun.c.INVALID_FILE_ATTRIBUTES => return ret.errnoSysP(0, .copyfile, this.osPathIntoSyncErrorBuf(src)).?, + const stat_ = reuse_stat orelse switch (c.GetFileAttributesW(src)) { + c.INVALID_FILE_ATTRIBUTES => return ret.errnoSysP(0, .copyfile, this.osPathIntoSyncErrorBuf(src)).?, else => |result| result, }; - if (stat_ & bun.c.FILE_ATTRIBUTE_REPARSE_POINT == 0) { - if 
(bun.c.CopyFileW(src, dest, @intFromBool(mode.shouldntOverwrite())) == 0) { + if (stat_ & c.FILE_ATTRIBUTE_REPARSE_POINT == 0) { + if (c.CopyFileW(src, dest, @intFromBool(mode.shouldntOverwrite())) == 0) { var err = windows.GetLastError(); var errpath: bun.OSPathSliceZ = undefined; switch (err) { @@ -6948,3 +6905,47 @@ fn zigDeleteTreeMinStackSizeWithKindHint(self: std.fs.Dir, sub_path: []const u8, } } } + +const std = @import("std"); +const bun = @import("bun"); +const strings = bun.strings; +const windows = bun.windows; +const c = bun.c; +const E = bun.sys.E; +const string = bun.string; +const JSC = bun.JSC; +const PathString = bun.PathString; +const Environment = bun.Environment; +const system = std.posix.system; +const Maybe = JSC.Maybe; +const Encoding = JSC.Node.Encoding; +const PosixToWinNormalizer = bun.path.PosixToWinNormalizer; + +const FileDescriptor = bun.FileDescriptor; +const FD = bun.FD; + +const AbortSignal = bun.webcore.AbortSignal; + +const Syscall = if (Environment.isWindows) bun.sys.sys_uv else bun.sys; + +const builtin = @import("builtin"); +const posix = std.posix; +const darwin = std.os.darwin; +const linux = std.os.linux; +const PathLike = JSC.Node.PathLike; +const PathOrFileDescriptor = JSC.Node.PathOrFileDescriptor; +const DirIterator = @import("./dir_iterator.zig"); +const Path = @import("../../resolver/resolve_path.zig"); +const FileSystem = @import("../../fs.zig").FileSystem; +const ArgumentsSlice = JSC.CallFrame.ArgumentsSlice; +const TimeLike = JSC.Node.TimeLike; +const Mode = bun.Mode; +const uv = bun.windows.libuv; +const uid_t = JSC.Node.uid_t; +const gid_t = JSC.Node.gid_t; +const ReadPosition = i64; +const StringOrBuffer = JSC.Node.StringOrBuffer; +const NodeFSFunctionEnum = std.meta.DeclEnum(NodeFS); +const UvFsCallback = fn (*uv.fs_t) callconv(.C) void; + +const SystemErrno = bun.sys.SystemErrno; diff --git a/src/bun.js/node/node_fs_binding.zig b/src/bun.js/node/node_fs_binding.zig index 27c0bd5fb6..565315a15c 100644 --- a/src/bun.js/node/node_fs_binding.zig +++ b/src/bun.js/node/node_fs_binding.zig @@ -2,22 +2,22 @@ const bun = @import("bun"); const JSC = bun.JSC; const std = @import("std"); const Flavor = JSC.Node.Flavor; -const ArgumentsSlice = JSC.Node.ArgumentsSlice; +const ArgumentsSlice = JSC.CallFrame.ArgumentsSlice; const system = std.posix.system; const Maybe = JSC.Maybe; const Encoding = JSC.Node.Encoding; const FeatureFlags = bun.FeatureFlags; -const Args = JSC.Node.NodeFS.Arguments; +const Args = JSC.Node.fs.NodeFS.Arguments; const d = JSC.d; -const NodeFSFunction = fn (this: *JSC.Node.NodeJSFS, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue; +const NodeFSFunction = fn (this: *JSC.Node.fs.Binding, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue; -const NodeFSFunctionEnum = std.meta.DeclEnum(JSC.Node.NodeFS); +const NodeFSFunctionEnum = std.meta.DeclEnum(node.fs.NodeFS); -/// Returns bindings to call JSC.Node.NodeFS.. +/// Returns bindings to call JSC.Node.fs.NodeFS.. /// Async calls use a thread pool. fn Bindings(comptime function_name: NodeFSFunctionEnum) type { - const function = @field(JSC.Node.NodeFS, @tagName(function_name)); + const function = @field(JSC.Node.fs.NodeFS, @tagName(function_name)); const fn_info = @typeInfo(@TypeOf(function)).@"fn"; if (fn_info.params.len != 3) { @compileError("Expected fn(NodeFS, Arguments) Return for NodeFS." 
++ @tagName(function_name)); @@ -25,7 +25,7 @@ fn Bindings(comptime function_name: NodeFSFunctionEnum) type { const Arguments = fn_info.params[1].type.?; return struct { - pub fn runSync(this: *JSC.Node.NodeJSFS, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + pub fn runSync(this: *Binding, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { var slice = ArgumentsSlice.init(globalObject.bunVM(), callframe.arguments()); defer slice.deinit(); @@ -46,7 +46,7 @@ fn Bindings(comptime function_name: NodeFSFunctionEnum) type { }; } - pub fn runAsync(this: *JSC.Node.NodeJSFS, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + pub fn runAsync(this: *Binding, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { var slice = ArgumentsSlice.init(globalObject.bunVM(), callframe.arguments()); slice.will_be_async = true; var deinit = false; @@ -75,10 +75,10 @@ fn Bindings(comptime function_name: NodeFSFunctionEnum) type { } } - const Task = @field(JSC.Node.Async, @tagName(function_name)); + const Task = @field(node.fs.Async, @tagName(function_name)); switch (comptime function_name) { .cp => return Task.create(globalObject, this, args, globalObject.bunVM(), slice.arena), - .readdir => if (args.recursive) return JSC.Node.Async.readdir_recursive.create(globalObject, args, globalObject.bunVM()), + .readdir => if (args.recursive) return node.fs.AsyncReaddirRecursiveTask.create(globalObject, args, globalObject.bunVM()), else => {}, } return Task.create(globalObject, this, args, globalObject.bunVM()); @@ -93,8 +93,8 @@ fn callSync(comptime FunctionEnum: NodeFSFunctionEnum) NodeFSFunction { return Bindings(FunctionEnum).runSync; } -pub const NodeJSFS = struct { - node_fs: JSC.Node.NodeFS = .{}, +pub const Binding = struct { + node_fs: node.fs.NodeFS = .{}, pub const js = JSC.Codegen.JSNodeJSFS; pub const toJS = js.toJS; @@ -103,7 +103,7 @@ pub const NodeJSFS = struct { pub const new = bun.TrivialNew(@This()); - pub fn finalize(this: *JSC.Node.NodeJSFS) void { + pub fn finalize(this: *Binding) void { if (this.node_fs.vm) |vm| { if (vm.node_fs == &this.node_fs) { return; @@ -113,11 +113,11 @@ pub const NodeJSFS = struct { bun.destroy(this); } - pub fn getDirent(_: *NodeJSFS, globalThis: *JSC.JSGlobalObject) JSC.JSValue { + pub fn getDirent(_: *Binding, globalThis: *JSC.JSGlobalObject) JSC.JSValue { return JSC.Node.Dirent.getConstructor(globalThis); } - pub fn getStats(_: *NodeJSFS, globalThis: *JSC.JSGlobalObject) JSC.JSValue { + pub fn getStats(_: *Binding, globalThis: *JSC.JSGlobalObject) JSC.JSValue { return JSC.Node.StatsSmall.getConstructor(globalThis); } @@ -213,7 +213,7 @@ pub const NodeJSFS = struct { }; pub fn createBinding(globalObject: *JSC.JSGlobalObject) JSC.JSValue { - const module = NodeJSFS.new(.{}); + const module = Binding.new(.{}); const vm = globalObject.bunVM(); module.node_fs.vm = vm; @@ -243,3 +243,5 @@ pub fn createMemfdForTesting(globalObject: *JSC.JSGlobalObject, callFrame: *JSC. }, } } + +const node = bun.api.node; diff --git a/src/bun.js/node/node_fs_constant.zig b/src/bun.js/node/node_fs_constant.zig index 321b25bbed..ef7f55a1c1 100644 --- a/src/bun.js/node/node_fs_constant.zig +++ b/src/bun.js/node/node_fs_constant.zig @@ -9,136 +9,134 @@ fn get(comptime name: []const u8) comptime_int { @compileError("Unknown Constant: " ++ name); } -pub const Constants = struct { - // File Access Constants - /// Constant for fs.access(). 
File is visible to the calling process. - pub const F_OK = std.posix.F_OK; - /// Constant for fs.access(). File can be read by the calling process. - pub const R_OK = std.posix.R_OK; - /// Constant for fs.access(). File can be written by the calling process. - pub const W_OK = std.posix.W_OK; - /// Constant for fs.access(). File can be executed by the calling process. - pub const X_OK = std.posix.X_OK; - // File Copy Constants - pub const Copyfile = enum(i32) { - _, - pub const exclusive = 1; - pub const clone = 2; - pub const force = 4; +// File Access Constants +/// Constant for fs.access(). File is visible to the calling process. +pub const F_OK = std.posix.F_OK; +/// Constant for fs.access(). File can be read by the calling process. +pub const R_OK = std.posix.R_OK; +/// Constant for fs.access(). File can be written by the calling process. +pub const W_OK = std.posix.W_OK; +/// Constant for fs.access(). File can be executed by the calling process. +pub const X_OK = std.posix.X_OK; +// File Copy Constants +pub const Copyfile = enum(i32) { + _, + pub const exclusive = 1; + pub const clone = 2; + pub const force = 4; - pub inline fn isForceClone(this: Copyfile) bool { - return (@intFromEnum(this) & COPYFILE_FICLONE_FORCE) != 0; - } + pub inline fn isForceClone(this: Copyfile) bool { + return (@intFromEnum(this) & COPYFILE_FICLONE_FORCE) != 0; + } - pub inline fn shouldntOverwrite(this: Copyfile) bool { - return (@intFromEnum(this) & COPYFILE_EXCL) != 0; - } + pub inline fn shouldntOverwrite(this: Copyfile) bool { + return (@intFromEnum(this) & COPYFILE_EXCL) != 0; + } - pub inline fn canUseClone(this: Copyfile) bool { - _ = this; - return Environment.isMac; - // return (@intFromEnum(this) | COPYFILE_FICLONE) != 0; - } - }; - - /// Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. - pub const COPYFILE_EXCL: i32 = Copyfile.exclusive; - /// - /// Constant for fs.copyFile. copy operation will attempt to create a copy-on-write reflink. - /// If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. - pub const COPYFILE_FICLONE: i32 = Copyfile.clone; - /// - /// Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. - /// If the underlying platform does not support copy-on-write, then the operation will fail with an error. - pub const COPYFILE_FICLONE_FORCE: i32 = Copyfile.force; - // File Open Constants - /// Constant for fs.open(). Flag indicating to open a file for read-only access. - pub const O_RDONLY = bun.O.RDONLY; - /// Constant for fs.open(). Flag indicating to open a file for write-only access. - pub const O_WRONLY = bun.O.WRONLY; - /// Constant for fs.open(). Flag indicating to open a file for read-write access. - pub const O_RDWR = bun.O.RDWR; - /// Constant for fs.open(). Flag indicating to create the file if it does not already exist. - pub const O_CREAT = bun.O.CREAT; - /// Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. - pub const O_EXCL = bun.O.EXCL; - - /// - /// Constant for fs.open(). Flag indicating that if path identifies a terminal device, - /// opening the path shall not cause that terminal to become the controlling terminal for the process - /// (if the process does not already have one). - pub const O_NOCTTY = bun.O.NOCTTY; - /// Constant for fs.open(). 
Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. - pub const O_TRUNC = bun.O.TRUNC; - /// Constant for fs.open(). Flag indicating that data will be appended to the end of the file. - pub const O_APPEND = bun.O.APPEND; - /// Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. - pub const O_DIRECTORY = bun.O.DIRECTORY; - - /// - /// constant for fs.open(). - /// Flag indicating reading accesses to the file system will no longer result in - /// an update to the atime information associated with the file. - /// This flag is available on Linux operating systems only. - pub const O_NOATIME = get("NOATIME"); - /// Constant for fs.open(). Flag indicating that the open should fail if the path is a symbolic link. - pub const O_NOFOLLOW = bun.O.NOFOLLOW; - /// Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. - pub const O_SYNC = bun.O.SYNC; - /// Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. - pub const O_DSYNC = bun.O.DSYNC; - /// Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. - pub const O_SYMLINK = get("SYMLINK"); - /// Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. - pub const O_DIRECT = get("DIRECT"); - /// Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. - pub const O_NONBLOCK = bun.O.NONBLOCK; - // File Type Constants - /// Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. - pub const S_IFMT = std.posix.S.IFMT; - /// Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. - pub const S_IFREG = std.posix.S.IFREG; - /// Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. - pub const S_IFDIR = std.posix.S.IFDIR; - /// Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. - pub const S_IFCHR = std.posix.S.IFCHR; - /// Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. - pub const S_IFBLK = std.posix.S.IFBLK; - /// Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. - pub const S_IFIFO = std.posix.S.IFIFO; - /// Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. - pub const S_IFLNK = std.posix.S.IFLNK; - /// Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. - pub const S_IFSOCK = std.posix.S.IFSOCK; - // File Mode Constants - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. - pub const S_IRWXU = std.posix.S.IRWXU; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. - pub const S_IRUSR = std.posix.S.IRUSR; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. 
- pub const S_IWUSR = std.posix.S.IWUSR; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. - pub const S_IXUSR = std.posix.S.IXUSR; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. - pub const S_IRWXG = std.posix.S.IRWXG; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. - pub const S_IRGRP = std.posix.S.IRGRP; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. - pub const S_IWGRP = std.posix.S.IWGRP; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. - pub const S_IXGRP = std.posix.S.IXGRP; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. - pub const S_IRWXO = std.posix.S.IRWXO; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. - pub const S_IROTH = std.posix.S.IROTH; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. - pub const S_IWOTH = std.posix.S.IWOTH; - /// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. - pub const S_IXOTH = std.posix.S.IXOTH; - - /// - /// When set, a memory file mapping is used to access the file. This flag - /// is available on Windows operating systems only. On other operating systems, - /// this flag is ignored. - pub const UV_FS_O_FILEMAP = 536870912; + pub inline fn canUseClone(this: Copyfile) bool { + _ = this; + return Environment.isMac; + // return (@intFromEnum(this) | COPYFILE_FICLONE) != 0; + } }; + +/// Constant for fs.copyFile. Flag indicating the destination file should not be overwritten if it already exists. +pub const COPYFILE_EXCL: i32 = Copyfile.exclusive; +/// +/// Constant for fs.copyFile. copy operation will attempt to create a copy-on-write reflink. +/// If the underlying platform does not support copy-on-write, then a fallback copy mechanism is used. +pub const COPYFILE_FICLONE: i32 = Copyfile.clone; +/// +/// Constant for fs.copyFile. Copy operation will attempt to create a copy-on-write reflink. +/// If the underlying platform does not support copy-on-write, then the operation will fail with an error. +pub const COPYFILE_FICLONE_FORCE: i32 = Copyfile.force; +// File Open Constants +/// Constant for fs.open(). Flag indicating to open a file for read-only access. +pub const O_RDONLY = bun.O.RDONLY; +/// Constant for fs.open(). Flag indicating to open a file for write-only access. +pub const O_WRONLY = bun.O.WRONLY; +/// Constant for fs.open(). Flag indicating to open a file for read-write access. +pub const O_RDWR = bun.O.RDWR; +/// Constant for fs.open(). Flag indicating to create the file if it does not already exist. +pub const O_CREAT = bun.O.CREAT; +/// Constant for fs.open(). Flag indicating that opening a file should fail if the O_CREAT flag is set and the file already exists. +pub const O_EXCL = bun.O.EXCL; + +/// +/// Constant for fs.open(). 
Flag indicating that if path identifies a terminal device, +/// opening the path shall not cause that terminal to become the controlling terminal for the process +/// (if the process does not already have one). +pub const O_NOCTTY = bun.O.NOCTTY; +/// Constant for fs.open(). Flag indicating that if the file exists and is a regular file, and the file is opened successfully for write access, its length shall be truncated to zero. +pub const O_TRUNC = bun.O.TRUNC; +/// Constant for fs.open(). Flag indicating that data will be appended to the end of the file. +pub const O_APPEND = bun.O.APPEND; +/// Constant for fs.open(). Flag indicating that the open should fail if the path is not a directory. +pub const O_DIRECTORY = bun.O.DIRECTORY; + +/// +/// constant for fs.open(). +/// Flag indicating reading accesses to the file system will no longer result in +/// an update to the atime information associated with the file. +/// This flag is available on Linux operating systems only. +pub const O_NOATIME = get("NOATIME"); +/// Constant for fs.open(). Flag indicating that the open should fail if the path is a symbolic link. +pub const O_NOFOLLOW = bun.O.NOFOLLOW; +/// Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O. +pub const O_SYNC = bun.O.SYNC; +/// Constant for fs.open(). Flag indicating that the file is opened for synchronous I/O with write operations waiting for data integrity. +pub const O_DSYNC = bun.O.DSYNC; +/// Constant for fs.open(). Flag indicating to open the symbolic link itself rather than the resource it is pointing to. +pub const O_SYMLINK = get("SYMLINK"); +/// Constant for fs.open(). When set, an attempt will be made to minimize caching effects of file I/O. +pub const O_DIRECT = get("DIRECT"); +/// Constant for fs.open(). Flag indicating to open the file in nonblocking mode when possible. +pub const O_NONBLOCK = bun.O.NONBLOCK; +// File Type Constants +/// Constant for fs.Stats mode property for determining a file's type. Bit mask used to extract the file type code. +pub const S_IFMT = std.posix.S.IFMT; +/// Constant for fs.Stats mode property for determining a file's type. File type constant for a regular file. +pub const S_IFREG = std.posix.S.IFREG; +/// Constant for fs.Stats mode property for determining a file's type. File type constant for a directory. +pub const S_IFDIR = std.posix.S.IFDIR; +/// Constant for fs.Stats mode property for determining a file's type. File type constant for a character-oriented device file. +pub const S_IFCHR = std.posix.S.IFCHR; +/// Constant for fs.Stats mode property for determining a file's type. File type constant for a block-oriented device file. +pub const S_IFBLK = std.posix.S.IFBLK; +/// Constant for fs.Stats mode property for determining a file's type. File type constant for a FIFO/pipe. +pub const S_IFIFO = std.posix.S.IFIFO; +/// Constant for fs.Stats mode property for determining a file's type. File type constant for a symbolic link. +pub const S_IFLNK = std.posix.S.IFLNK; +/// Constant for fs.Stats mode property for determining a file's type. File type constant for a socket. +pub const S_IFSOCK = std.posix.S.IFSOCK; +// File Mode Constants +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by owner. +pub const S_IRWXU = std.posix.S.IRWXU; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by owner. 
+pub const S_IRUSR = std.posix.S.IRUSR; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by owner. +pub const S_IWUSR = std.posix.S.IWUSR; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by owner. +pub const S_IXUSR = std.posix.S.IXUSR; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by group. +pub const S_IRWXG = std.posix.S.IRWXG; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by group. +pub const S_IRGRP = std.posix.S.IRGRP; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by group. +pub const S_IWGRP = std.posix.S.IWGRP; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by group. +pub const S_IXGRP = std.posix.S.IXGRP; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable, writable and executable by others. +pub const S_IRWXO = std.posix.S.IRWXO; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating readable by others. +pub const S_IROTH = std.posix.S.IROTH; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating writable by others. +pub const S_IWOTH = std.posix.S.IWOTH; +/// Constant for fs.Stats mode property for determining access permissions for a file. File mode indicating executable by others. +pub const S_IXOTH = std.posix.S.IXOTH; + +/// +/// When set, a memory file mapping is used to access the file. This flag +/// is available on Windows operating systems only. On other operating systems, +/// this flag is ignored. 
+pub const UV_FS_O_FILEMAP = 536870912; diff --git a/src/bun.js/node/node_fs_stat_watcher.zig b/src/bun.js/node/node_fs_stat_watcher.zig index 2c03d64fc4..dec326a95f 100644 --- a/src/bun.js/node/node_fs_stat_watcher.zig +++ b/src/bun.js/node/node_fs_stat_watcher.zig @@ -3,7 +3,7 @@ const JSC = bun.JSC; const bun = @import("bun"); const Fs = @import("../../fs.zig"); const Path = @import("../../resolver/resolve_path.zig"); -const Encoder = JSC.WebCore.Encoder; +const Encoder = JSC.WebCore.encoding; const Mutex = bun.Mutex; const uws = @import("../../deps/uws.zig"); @@ -13,7 +13,7 @@ const EventLoopTimer = @import("../api/Timer.zig").EventLoopTimer; const VirtualMachine = JSC.VirtualMachine; const EventLoop = JSC.EventLoop; const PathLike = JSC.Node.PathLike; -const ArgumentsSlice = JSC.Node.ArgumentsSlice; +const ArgumentsSlice = JSC.CallFrame.ArgumentsSlice; const Output = bun.Output; const string = bun.string; const StoredFileDescriptorType = bun.StoredFileDescriptorType; @@ -173,7 +173,10 @@ pub const StatWatcherScheduler = struct { } }; +// TODO: make this a top-level struct pub const StatWatcher = struct { + pub const Scheduler = StatWatcherScheduler; + next: ?*StatWatcher = null, ctx: *VirtualMachine, diff --git a/src/bun.js/node/node_fs_watcher.zig b/src/bun.js/node/node_fs_watcher.zig index fc5798a104..618efcb055 100644 --- a/src/bun.js/node/node_fs_watcher.zig +++ b/src/bun.js/node/node_fs_watcher.zig @@ -3,13 +3,13 @@ const JSC = bun.JSC; const bun = @import("bun"); const Fs = @import("../../fs.zig"); const Path = @import("../../resolver/resolve_path.zig"); -const Encoder = JSC.WebCore.Encoder; +const Encoder = JSC.WebCore.encoding; const Mutex = bun.Mutex; const VirtualMachine = JSC.VirtualMachine; const EventLoop = JSC.EventLoop; const PathLike = JSC.Node.PathLike; -const ArgumentsSlice = JSC.Node.ArgumentsSlice; +const ArgumentsSlice = JSC.CallFrame.ArgumentsSlice; const Output = bun.Output; const string = bun.string; const StoredFileDescriptorType = bun.StoredFileDescriptorType; @@ -17,7 +17,9 @@ const Environment = bun.Environment; const Async = bun.Async; const log = Output.scoped(.@"fs.watch", true); const PathWatcher = if (Environment.isWindows) @import("./win_watcher.zig") else @import("./path_watcher.zig"); +const webcore = bun.webcore; +// TODO: make this a top-level struct pub const FSWatcher = struct { pub const js = JSC.Codegen.JSFSWatcher; pub const toJS = js.toJS; @@ -28,7 +30,7 @@ pub const FSWatcher = struct { verbose: bool = false, mutex: Mutex, - signal: ?*JSC.AbortSignal, + signal: ?*webcore.AbortSignal, persistent: bool, path_watcher: ?*PathWatcher.PathWatcher, poll_ref: Async.KeepAlive = .{}, @@ -200,7 +202,7 @@ pub const FSWatcher = struct { }; pub const FSWatchTaskWindows = struct { - event: Event = .{ .@"error" = .{ .errno = @intFromEnum(bun.C.SystemErrno.EINVAL), .syscall = .watch } }, + event: Event = .{ .@"error" = .{ .errno = @intFromEnum(bun.sys.SystemErrno.EINVAL), .syscall = .watch } }, ctx: *FSWatcher, /// Unused: To match the API of the posix version @@ -337,7 +339,7 @@ pub const FSWatcher = struct { path: PathLike, listener: JSC.JSValue, global_this: *JSC.JSGlobalObject, - signal: ?*JSC.AbortSignal, + signal: ?*webcore.AbortSignal, persistent: bool, recursive: bool, encoding: JSC.Node.Encoding, @@ -351,7 +353,7 @@ pub const FSWatcher = struct { defer if (should_deinit_path) path.deinit(); var listener: JSC.JSValue = .zero; - var signal: ?*JSC.AbortSignal = null; + var signal: ?*webcore.AbortSignal = null; var persistent: bool = true; var 
recursive: bool = false; var encoding: JSC.Node.Encoding = .utf8; @@ -387,7 +389,7 @@ pub const FSWatcher = struct { // abort signal if (try options_or_callable.getTruthy(ctx, "signal")) |signal_| { - if (JSC.AbortSignal.fromJS(signal_)) |signal_obj| { + if (webcore.AbortSignal.fromJS(signal_)) |signal_obj| { //Keep it alive signal_.ensureStillAlive(); signal = signal_obj; diff --git a/src/bun.js/node/node_os.zig b/src/bun.js/node/node_os.zig index 8a95333725..f5790e3613 100644 --- a/src/bun.js/node/node_os.zig +++ b/src/bun.js/node/node_os.zig @@ -1,7 +1,6 @@ const std = @import("std"); const builtin = @import("builtin"); const bun = @import("bun"); -const C = bun.C; const string = bun.string; const strings = bun.strings; const JSC = bun.JSC; @@ -9,6 +8,8 @@ const Environment = bun.Environment; const Global = bun.Global; const libuv = bun.windows.libuv; const gen = bun.gen.node_os; +const sys = bun.sys; +const c = bun.c; pub fn createNodeOsBinding(global: *JSC.JSGlobalObject) JSC.JSValue { return JSC.JSObject.create(.{ @@ -186,20 +187,23 @@ fn cpusImplLinux(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { extern fn bun_sysconf__SC_CLK_TCK() isize; fn cpusImplDarwin(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { - const local_bindings = @import("../../darwin_c.zig"); - const c = std.c; - // Fetch the CPU info structure var num_cpus: c.natural_t = 0; - var info: [*]local_bindings.processor_cpu_load_info = undefined; + var info: [*]bun.c.processor_cpu_load_info = undefined; var info_size: std.c.mach_msg_type_number_t = 0; - if (local_bindings.host_processor_info(std.c.mach_host_self(), local_bindings.PROCESSOR_CPU_LOAD_INFO, &num_cpus, @as(*local_bindings.processor_info_array_t, @ptrCast(&info)), &info_size) != .SUCCESS) { + if (bun.c.host_processor_info( + std.c.mach_host_self(), + bun.c.PROCESSOR_CPU_LOAD_INFO, + &num_cpus, + @as(*bun.c.processor_info_array_t, @ptrCast(&info)), + &info_size, + ) != 0) { return error.no_processor_info; } defer _ = std.c.vm_deallocate(std.c.mach_task_self(), @intFromPtr(info), info_size); // Ensure we got the amount of data we expected to guard against buffer overruns - if (info_size != C.PROCESSOR_CPU_LOAD_INFO_COUNT * num_cpus) { + if (info_size != bun.c.PROCESSOR_CPU_LOAD_INFO_COUNT * num_cpus) { return error.broken_process_info; } @@ -286,11 +290,16 @@ pub fn cpusImplWindows(globalThis: *JSC.JSGlobalObject) !JSC.JSValue { } pub fn freemem() u64 { - return C.getFreeMemory(); + // OsBinding.cpp + return @extern(*const fn () callconv(.C) u64, .{ + .name = "Bun__Os__getFreeMemory", + })(); } +extern fn get_process_priority(pid: i32) i32; pub fn getPriority(global: *JSC.JSGlobalObject, pid: i32) bun.JSError!i32 { - return C.getProcessPriority(pid) orelse { + const result = get_process_priority(pid); + if (result == std.math.maxInt(i32)) { const err = JSC.SystemError{ .message = bun.String.static("no such process"), .code = bun.String.static("ESRCH"), @@ -301,7 +310,8 @@ pub fn getPriority(global: *JSC.JSGlobalObject, pid: i32) bun.JSError!i32 { .syscall = bun.String.static("uv_os_getpriority"), }; return global.throwValue(err.toErrorInstanceWithInfoObject(global)); - }; + } + return result; } pub fn homedir(global: *JSC.JSGlobalObject) !bun.String { @@ -333,23 +343,23 @@ pub fn homedir(global: *JSC.JSGlobalObject) !bun.String { defer if (string_bytes.ptr != &stack_string_bytes) bun.default_allocator.free(string_bytes); - var pw: bun.C.passwd = undefined; - var result: ?*bun.C.passwd = null; + var pw: bun.c.passwd = undefined; + var result: ?*bun.c.passwd = 
null; const ret = while (true) { - const ret = bun.C.getpwuid_r( - bun.C.geteuid(), + const ret = bun.c.getpwuid_r( + bun.c.geteuid(), &pw, string_bytes.ptr, string_bytes.len, &result, ); - if (ret == @intFromEnum(bun.C.E.INTR)) + if (ret == @intFromEnum(bun.sys.E.INTR)) continue; // If the system call wants more memory, double it. - if (ret == @intFromEnum(bun.C.E.RANGE)) { + if (ret == @intFromEnum(bun.sys.E.RANGE)) { const len = string_bytes.len; bun.default_allocator.free(string_bytes); string_bytes = ""; @@ -408,7 +418,43 @@ pub fn hostname(global: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { } pub fn loadavg(global: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { - const result = C.getSystemLoadavg(); + const result = switch (bun.Environment.os) { + .mac => loadavg: { + var avg: c.struct_loadavg = undefined; + var size: usize = @sizeOf(@TypeOf(avg)); + + std.posix.sysctlbynameZ( + "vm.loadavg", + &avg, + &size, + null, + 0, + ) catch |err| switch (err) { + else => break :loadavg [3]f64{ 0, 0, 0 }, + }; + + const scale: f64 = @floatFromInt(avg.fscale); + break :loadavg .{ + if (scale == 0.0) 0 else @as(f64, @floatFromInt(avg.ldavg[0])) / scale, + if (scale == 0.0) 0 else @as(f64, @floatFromInt(avg.ldavg[1])) / scale, + if (scale == 0.0) 0 else @as(f64, @floatFromInt(avg.ldavg[2])) / scale, + }; + }, + .linux => loadavg: { + var info: c.struct_sysinfo = undefined; + if (c.sysinfo(&info) == @as(c_int, 0)) { + break :loadavg [3]f64{ + std.math.ceil((@as(f64, @floatFromInt(info.loads[0])) / 65536.0) * 100.0) / 100.0, + std.math.ceil((@as(f64, @floatFromInt(info.loads[1])) / 65536.0) * 100.0) / 100.0, + std.math.ceil((@as(f64, @floatFromInt(info.loads[2])) / 65536.0) * 100.0) / 100.0, + }; + } + break :loadavg [3]f64{ 0, 0, 0 }; + }, + .windows => .{ 0, 0, 0 }, + .wasm => @compileError("TODO"), + }; + return JSC.JSArray.create(global, &.{ JSC.JSValue.jsNumber(result[0]), JSC.JSValue.jsNumber(result[1]), @@ -424,8 +470,8 @@ pub const networkInterfaces = switch (Environment.os) { fn networkInterfacesPosix(globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { // getifaddrs sets a pointer to a linked list - var interface_start: ?*C.ifaddrs = null; - const rc = C.getifaddrs(&interface_start); + var interface_start: ?*c.ifaddrs = null; + const rc = c.getifaddrs(&interface_start); if (rc != 0) { const err = JSC.SystemError{ .message = bun.String.static("A system error occurred: getifaddrs returned an error"), @@ -436,14 +482,14 @@ fn networkInterfacesPosix(globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSVal return globalThis.throwValue(err.toErrorInstance(globalThis)); } - defer C.freeifaddrs(interface_start); + defer c.freeifaddrs(interface_start); const helpers = struct { // We'll skip interfaces that aren't actually available - pub fn skip(iface: *C.ifaddrs) bool { + pub fn skip(iface: *c.ifaddrs) bool { // Skip interfaces that aren't actually available - if (iface.ifa_flags & C.IFF_RUNNING == 0) return true; - if (iface.ifa_flags & C.IFF_UP == 0) return true; + if (iface.ifa_flags & c.IFF_RUNNING == 0) return true; + if (iface.ifa_flags & c.IFF_UP == 0) return true; if (iface.ifa_addr == null) return true; return false; @@ -451,18 +497,18 @@ fn networkInterfacesPosix(globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSVal // We won't actually return link-layer interfaces but we need them for // extracting the MAC address - pub fn isLinkLayer(iface: *C.ifaddrs) bool { + pub fn isLinkLayer(iface: *c.ifaddrs) bool { if (iface.ifa_addr == null) return false; return if (comptime 
Environment.isLinux) return iface.ifa_addr.*.sa_family == std.posix.AF.PACKET else if (comptime Environment.isMac) - return iface.ifa_addr.?.*.family == std.posix.AF.LINK + return iface.ifa_addr.?.*.sa_family == std.posix.AF.LINK else - unreachable; + @compileError("unreachable"); } - pub fn isLoopback(iface: *C.ifaddrs) bool { - return iface.ifa_flags & C.IFF_LOOPBACK == C.IFF_LOOPBACK; + pub fn isLoopback(iface: *c.ifaddrs) bool { + return iface.ifa_flags & c.IFF_LOOPBACK == c.IFF_LOOPBACK; } }; @@ -552,7 +598,7 @@ fn networkInterfacesPosix(globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSVal if (comptime Environment.isLinux) { break @as(?*std.posix.sockaddr.ll, @ptrCast(@alignCast(ll_iface.ifa_addr))); } else if (comptime Environment.isMac) { - break @as(?*C.sockaddr_dl, @ptrCast(@alignCast(ll_iface.ifa_addr))); + break @as(?*c.sockaddr_dl, @ptrCast(@alignCast(ll_iface.ifa_addr))); } else { @compileError("unreachable"); } @@ -720,11 +766,61 @@ fn networkInterfacesWindows(globalThis: *JSC.JSGlobalObject) bun.JSError!JSC.JSV pub fn release() bun.String { var name_buffer: [bun.HOST_NAME_MAX]u8 = undefined; - return bun.String.createUTF8(C.getRelease(&name_buffer)); + + const value = switch (Environment.os) { + .linux => slice: { + const uts = std.posix.uname(); + const result = bun.sliceTo(&uts.release, 0); + bun.copy(u8, &name_buffer, result); + + break :slice name_buffer[0..result.len]; + }, + .mac => slice: { + @memset(&name_buffer, 0); + + var size: usize = name_buffer.len; + + if (std.c.sysctlbyname( + "kern.osrelease", + &name_buffer, + &size, + null, + 0, + ) == -1) break :slice "unknown"; + + break :slice bun.sliceTo(&name_buffer, 0); + }, + .windows => slice: { + var info: bun.windows.libuv.uv_utsname_s = undefined; + const err = bun.windows.libuv.uv_os_uname(&info); + if (err != 0) { + break :slice "unknown"; + } + const value = bun.sliceTo(&info.release, 0); + @memcpy(name_buffer[0..value.len], value); + break :slice name_buffer[0..value.len]; + }, + else => @compileError("unsupported os"), + }; + + return bun.String.createUTF8(value); +} + +pub extern fn set_process_priority(pid: i32, priority: i32) i32; +pub fn setProcessPriorityImpl(pid: i32, priority: i32) std.c.E { + if (pid < 0) return .SRCH; + + const code: i32 = set_process_priority(pid, priority); + + if (code == -2) return .SRCH; + if (code == 0) return .SUCCESS; + + const errcode = bun.sys.getErrno(code); + return @enumFromInt(@intFromEnum(errcode)); } pub fn setPriority1(global: *JSC.JSGlobalObject, pid: i32, priority: i32) !void { - const errcode = C.setProcessPriority(pid, priority); + const errcode = setProcessPriorityImpl(pid, priority); switch (errcode) { .SRCH => { const err = JSC.SystemError{ @@ -773,26 +869,75 @@ pub fn setPriority2(global: *JSC.JSGlobalObject, priority: i32) !void { } pub fn totalmem() u64 { - return C.getTotalMemory(); + switch (bun.Environment.os) { + .mac => { + var memory_: [32]c_ulonglong = undefined; + var size: usize = memory_.len; + + std.posix.sysctlbynameZ( + "hw.memsize", + &memory_, + &size, + null, + 0, + ) catch |err| switch (err) { + else => return 0, + }; + + return memory_[0]; + }, + .linux => { + var info: c.struct_sysinfo = undefined; + if (c.sysinfo(&info) == @as(c_int, 0)) return @as(u64, @bitCast(info.totalram)) *% @as(c_ulong, @bitCast(@as(c_ulong, info.mem_unit))); + return 0; + }, + .windows => { + return libuv.uv_get_total_memory(); + }, + else => @compileError("unsupported os"), + } } pub fn uptime(global: *JSC.JSGlobalObject) bun.JSError!f64 { - if 
(Environment.isWindows) { - var uptime_value: f64 = undefined; - const err = libuv.uv_uptime(&uptime_value); - if (err != 0) { - const sys_err = JSC.SystemError{ - .message = bun.String.static("failed to get system uptime"), - .code = bun.String.static("ERR_SYSTEM_ERROR"), - .errno = err, - .syscall = bun.String.static("uv_uptime"), - }; - return global.throwValue(sys_err.toErrorInstance(global)); - } - return uptime_value; - } + switch (Environment.os) { + .windows => { + var uptime_value: f64 = undefined; + const err = libuv.uv_uptime(&uptime_value); + if (err != 0) { + const sys_err = JSC.SystemError{ + .message = bun.String.static("failed to get system uptime"), + .code = bun.String.static("ERR_SYSTEM_ERROR"), + .errno = err, + .syscall = bun.String.static("uv_uptime"), + }; + return global.throwValue(sys_err.toErrorInstance(global)); + } + return uptime_value; + }, + .mac => { + var boot_time: std.posix.timeval = undefined; + var size: usize = @sizeOf(@TypeOf(boot_time)); - return @floatFromInt(C.getSystemUptime()); + std.posix.sysctlbynameZ( + "kern.boottime", + &boot_time, + &size, + null, + 0, + ) catch |err| switch (err) { + else => return 0, + }; + + return @floatFromInt(std.time.timestamp() - boot_time.sec); + }, + .linux => { + var info: c.struct_sysinfo = undefined; + if (c.sysinfo(&info) == 0) + return @floatFromInt(info.uptime); + return 0; + }, + else => @compileError("unsupported os"), + } } pub fn userInfo(globalThis: *JSC.JSGlobalObject, options: gen.UserInfoOptions) bun.JSError!JSC.JSValue { @@ -815,8 +960,8 @@ pub fn userInfo(globalThis: *JSC.JSGlobalObject, options: gen.UserInfoOptions) b result.put(globalThis, JSC.ZigString.static("username"), JSC.ZigString.init(username).withEncoding().toJS(globalThis)); result.put(globalThis, JSC.ZigString.static("shell"), JSC.ZigString.init(bun.getenvZ("SHELL") orelse "unknown").withEncoding().toJS(globalThis)); - result.put(globalThis, JSC.ZigString.static("uid"), JSC.JSValue.jsNumber(C.getuid())); - result.put(globalThis, JSC.ZigString.static("gid"), JSC.JSValue.jsNumber(C.getgid())); + result.put(globalThis, JSC.ZigString.static("uid"), JSC.JSValue.jsNumber(c.getuid())); + result.put(globalThis, JSC.ZigString.static("gid"), JSC.JSValue.jsNumber(c.getgid())); } return result; @@ -824,7 +969,44 @@ pub fn userInfo(globalThis: *JSC.JSGlobalObject, options: gen.UserInfoOptions) b pub fn version() bun.JSError!bun.String { var name_buffer: [bun.HOST_NAME_MAX]u8 = undefined; - return bun.String.createUTF8(C.getVersion(&name_buffer)); + + const slice: []const u8 = switch (Environment.os) { + .mac => slice: { + @memset(&name_buffer, 0); + + var size: usize = name_buffer.len; + + if (std.c.sysctlbyname( + "kern.version", + &name_buffer, + &size, + null, + 0, + ) == -1) break :slice "unknown"; + + break :slice bun.sliceTo(&name_buffer, 0); + }, + .linux => slice: { + const uts = std.posix.uname(); + const result = bun.sliceTo(&uts.version, 0); + bun.copy(u8, &name_buffer, result); + + break :slice name_buffer[0..result.len]; + }, + .windows => slice: { + var info: bun.windows.libuv.uv_utsname_s = undefined; + const err = bun.windows.libuv.uv_os_uname(&info); + if (err != 0) { + break :slice "unknown"; + } + const slice = bun.sliceTo(&info.version, 0); + @memcpy(name_buffer[0..slice.len], slice); + break :slice name_buffer[0..slice.len]; + }, + else => @compileError("unsupported os"), + }; + + return bun.String.createUTF8(slice); } /// Given a netmask returns a CIDR suffix. Returns null if the mask is not valid. 
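
[Editor's note] For readers less familiar with the pattern the node_os.zig hunks above adopt (replacing the old `C.getSystemLoadavg()` / `C.getTotalMemory()` / `C.getSystemUptime()` helpers with direct `sysinfo(2)` / `sysctl(3)` calls), here is a minimal standalone sketch of the Linux side. It is not part of this patch: it assumes a Linux target with libc headers available and uses `@cImport` of `<sys/sysinfo.h>` instead of Bun's pre-translated `bun.c` bindings, and the 65536.0 scale factor is the same fixed-point divisor the `loadavg()` hunk uses for `info.loads[i]`.

```zig
// Standalone sketch (assumption: Linux + libc; not Bun code).
const std = @import("std");
const c = @cImport(@cInclude("sys/sysinfo.h"));

pub fn main() !void {
    var info: c.struct_sysinfo = undefined;
    if (c.sysinfo(&info) != 0) return error.SysinfoFailed;

    // loads[] is fixed-point, scaled by 2^SI_LOAD_SHIFT (i.e. 65536).
    const scale: f64 = 65536.0;
    const load1 = @as(f64, @floatFromInt(info.loads[0])) / scale;

    // totalram is expressed in multiples of mem_unit bytes.
    const total_bytes: u64 = @as(u64, info.totalram) * @as(u64, info.mem_unit);

    std.debug.print("load1={d:.2} totalmem={d} bytes uptime={d}s\n", .{ load1, total_bytes, info.uptime });
}
```

Built with `zig build-exe -lc` (linking libc is required for `@cImport`), this prints the same quantities the hunks above compute: the 1-minute load average, total memory in bytes, and uptime in seconds.
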
diff --git a/src/bun.js/node/node_process.zig b/src/bun.js/node/node_process.zig new file mode 100644 index 0000000000..ef65a743e9 --- /dev/null +++ b/src/bun.js/node/node_process.zig @@ -0,0 +1,325 @@ +//! Process information and control APIs (`globalThis.process` / `node:process`) +comptime { + @export(&getTitle, .{ .name = "Bun__Process__getTitle" }); + @export(&setTitle, .{ .name = "Bun__Process__setTitle" }); + @export(&getArgv, .{ .name = "Bun__Process__getArgv" }); + @export(&getCwd, .{ .name = "Bun__Process__getCwd" }); + @export(&setCwd, .{ .name = "Bun__Process__setCwd" }); + @export(&exit, .{ .name = "Bun__Process__exit" }); + @export(&getArgv0, .{ .name = "Bun__Process__getArgv0" }); + @export(&getExecPath, .{ .name = "Bun__Process__getExecPath" }); + @export(&getExecArgv, .{ .name = "Bun__Process__getExecArgv" }); +} + +var title_mutex = bun.Mutex{}; + +pub fn getTitle(_: *JSGlobalObject, title: *ZigString) callconv(.C) void { + title_mutex.lock(); + defer title_mutex.unlock(); + const str = bun.CLI.Bun__Node__ProcessTitle; + title.* = ZigString.init(str orelse "bun"); +} + +// TODO: https://github.com/nodejs/node/blob/master/deps/uv/src/unix/darwin-proctitle.c +pub fn setTitle(globalObject: *JSGlobalObject, newvalue: *ZigString) callconv(.C) JSValue { + title_mutex.lock(); + defer title_mutex.unlock(); + if (bun.CLI.Bun__Node__ProcessTitle) |_| bun.default_allocator.free(bun.CLI.Bun__Node__ProcessTitle.?); + bun.CLI.Bun__Node__ProcessTitle = newvalue.dupe(bun.default_allocator) catch bun.outOfMemory(); + return newvalue.toJS(globalObject); +} + +pub fn getArgv0(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { + return JSC.ZigString.fromUTF8(bun.argv[0]).toJS(globalObject); +} + +pub fn getExecPath(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { + const out = bun.selfExePath() catch { + // if for any reason we are unable to get the executable path, we just return argv[0] + return getArgv0(globalObject); + }; + + return JSC.ZigString.fromUTF8(out).toJS(globalObject); +} + +pub fn getExecArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { + var sfb = std.heap.stackFallback(4096, globalObject.allocator()); + const temp_alloc = sfb.get(); + const vm = globalObject.bunVM(); + + if (vm.worker) |worker| { + // was explicitly overridden for the worker? + if (worker.execArgv) |execArgv| { + const array = JSC.JSValue.createEmptyArray(globalObject, execArgv.len); + for (0..execArgv.len) |i| { + array.putIndex(globalObject, @intCast(i), bun.String.init(execArgv[i]).toJS(globalObject)); + } + return array; + } + } + + var args = std.ArrayList(bun.String).initCapacity(temp_alloc, bun.argv.len - 1) catch bun.outOfMemory(); + defer args.deinit(); + defer for (args.items) |*arg| arg.deref(); + + var seen_run = false; + var prev: ?[]const u8 = null; + + // we re-parse the process argv to extract execArgv, since this is a very uncommon operation + // it isn't worth doing this as a part of the CLI + for (bun.argv[@min(1, bun.argv.len)..]) |arg| { + defer prev = arg; + + if (arg.len >= 1 and arg[0] == '-') { + args.append(bun.String.createUTF8(arg)) catch bun.outOfMemory(); + continue; + } + + if (!seen_run and bun.strings.eqlComptime(arg, "run")) { + seen_run = true; + continue; + } + + // A set of execArgv args consume an extra argument, so we do not want to + // confuse these with script names. 
+ const map = bun.ComptimeStringMap(void, comptime brk: { + const auto_params = bun.CLI.Arguments.auto_params; + const KV = struct { []const u8, void }; + var entries: [auto_params.len]KV = undefined; + var i = 0; + for (auto_params) |param| { + if (param.takes_value != .none) { + if (param.names.long) |name| { + entries[i] = .{ "--" ++ name, {} }; + i += 1; + } + if (param.names.short) |name| { + entries[i] = .{ &[_]u8{ '-', name }, {} }; + i += 1; + } + } + } + + var result: [i]KV = undefined; + @memcpy(&result, entries[0..i]); + break :brk result; + }); + + if (prev) |p| if (map.has(p)) { + args.append(bun.String.createUTF8(arg)) catch @panic("OOM"); + continue; + }; + + // we hit the script name + break; + } + + return bun.String.toJSArray(globalObject, args.items); +} + +pub fn getArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { + const vm = globalObject.bunVM(); + + // Allocate up to 32 strings in stack + var stack_fallback_allocator = std.heap.stackFallback( + 32 * @sizeOf(JSC.ZigString) + (bun.MAX_PATH_BYTES + 1) + 32, + bun.default_allocator, + ); + const allocator = stack_fallback_allocator.get(); + + var args_count: usize = vm.argv.len; + if (vm.worker) |worker| { + args_count = worker.argv.len; + } + + const args = allocator.alloc( + bun.String, + // argv omits "bun" because it could be "bun run" or "bun" and it's kind of ambiguous + // argv also omits the script name + args_count + 2, + ) catch bun.outOfMemory(); + var args_list: std.ArrayListUnmanaged(bun.String) = .initBuffer(args); + + if (vm.standalone_module_graph != null) { + // Don't break user's code because they did process.argv.slice(2) + // Even if they didn't type "bun", we still want to add it as argv[0] + args_list.appendAssumeCapacity( + bun.String.static("bun"), + ); + } else { + const exe_path = bun.selfExePath() catch null; + args_list.appendAssumeCapacity( + if (exe_path) |str| bun.String.fromUTF8(str) else bun.String.static("bun"), + ); + } + + if (vm.main.len > 0 and + !strings.endsWithComptime(vm.main, bun.pathLiteral("/[eval]")) and + !strings.endsWithComptime(vm.main, bun.pathLiteral("/[stdin]"))) + { + if (vm.worker != null and vm.worker.?.eval_mode) { + args_list.appendAssumeCapacity(bun.String.static("[worker eval]")); + } else { + args_list.appendAssumeCapacity(bun.String.fromUTF8(vm.main)); + } + } + + defer allocator.free(args); + + if (vm.worker) |worker| { + for (worker.argv) |arg| { + args_list.appendAssumeCapacity(bun.String.init(arg)); + } + } else { + for (vm.argv) |arg| { + const str = bun.String.fromUTF8(arg); + // https://github.com/yargs/yargs/blob/adb0d11e02c613af3d9427b3028cc192703a3869/lib/utils/process-argv.ts#L1 + args_list.appendAssumeCapacity(str); + } + } + + return bun.String.toJSArray(globalObject, args_list.items); +} + +pub fn getCwd(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { + return JSC.toJSHostValue(globalObject, getCwd_(globalObject)); +} +fn getCwd_(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { + var buf: bun.PathBuffer = undefined; + switch (bun.api.node.path.getCwd(&buf)) { + .result => |r| return JSC.ZigString.init(r).withEncoding().toJS(globalObject), + .err => |e| { + return globalObject.throwValue(e.toJSC(globalObject)); + }, + } +} + +pub fn setCwd(globalObject: *JSC.JSGlobalObject, to: *JSC.ZigString) callconv(.C) JSC.JSValue { + return JSC.toJSHostValue(globalObject, setCwd_(globalObject, to)); +} +fn setCwd_(globalObject: *JSC.JSGlobalObject, to: *JSC.ZigString) bun.JSError!JSC.JSValue { + if (to.len == 0) { + 
return globalObject.throwInvalidArguments("Expected path to be a non-empty string", .{}); + } + const vm = globalObject.bunVM(); + const fs = vm.transpiler.fs; + + var buf: bun.PathBuffer = undefined; + const slice = to.sliceZBuf(&buf) catch return globalObject.throw("Invalid path", .{}); + + switch (Syscall.chdir(fs.top_level_dir, slice)) { + .result => { + // When we update the cwd from JS, we have to update the bundler's version as well + // However, this might be called many times in a row, so we use a pre-allocated buffer + // that way we don't have to worry about garbage collector + const into_cwd_buf = switch (bun.sys.getcwd(&buf)) { + .result => |r| r, + .err => |err| { + _ = Syscall.chdir(fs.top_level_dir, fs.top_level_dir); + return globalObject.throwValue(err.toJSC(globalObject)); + }, + }; + @memcpy(fs.top_level_dir_buf[0..into_cwd_buf.len], into_cwd_buf); + fs.top_level_dir_buf[into_cwd_buf.len] = 0; + fs.top_level_dir = fs.top_level_dir_buf[0..into_cwd_buf.len :0]; + + const len = fs.top_level_dir.len; + // Ensure the path ends with a slash + if (fs.top_level_dir_buf[len - 1] != std.fs.path.sep) { + fs.top_level_dir_buf[len] = std.fs.path.sep; + fs.top_level_dir_buf[len + 1] = 0; + fs.top_level_dir = fs.top_level_dir_buf[0 .. len + 1 :0]; + } + const withoutTrailingSlash = if (Environment.isWindows) strings.withoutTrailingSlashWindowsPath else strings.withoutTrailingSlash; + var str = bun.String.createUTF8(withoutTrailingSlash(fs.top_level_dir)); + return str.transferToJS(globalObject); + }, + .err => |e| { + return globalObject.throwValue(e.toJSC(globalObject)); + }, + } +} + +// TODO(@190n) this may need to be noreturn +pub fn exit(globalObject: *JSC.JSGlobalObject, code: u8) callconv(.c) void { + var vm = globalObject.bunVM(); + if (vm.worker) |worker| { + vm.exit_handler.exit_code = code; + worker.requestTerminate(); + return; + } + + vm.exit_handler.exit_code = code; + vm.onExit(); + vm.globalExit(); +} + +// TODO: switch this to using *bun.wtf.String when it is added +pub fn Bun__Process__editWindowsEnvVar(k: bun.String, v: bun.String) callconv(.C) void { + comptime bun.assert(bun.Environment.isWindows); + if (k.tag == .Empty) return; + const wtf1 = k.value.WTFStringImpl; + var fixed_stack_allocator = std.heap.stackFallback(1025, bun.default_allocator); + const allocator = fixed_stack_allocator.get(); + var buf1 = allocator.alloc(u16, k.utf16ByteLength() + 1) catch bun.outOfMemory(); + defer allocator.free(buf1); + var buf2 = allocator.alloc(u16, v.utf16ByteLength() + 1) catch bun.outOfMemory(); + defer allocator.free(buf2); + const len1: usize = switch (wtf1.is8Bit()) { + true => bun.strings.copyLatin1IntoUTF16([]u16, buf1, []const u8, wtf1.latin1Slice()).written, + false => b: { + @memcpy(buf1[0..wtf1.length()], wtf1.utf16Slice()); + break :b wtf1.length(); + }, + }; + buf1[len1] = 0; + const str2: ?[*:0]const u16 = if (v.tag != .Dead) str: { + if (v.tag == .Empty) break :str (&[_]u16{0})[0..0 :0]; + const wtf2 = v.value.WTFStringImpl; + const len2: usize = switch (wtf2.is8Bit()) { + true => bun.strings.copyLatin1IntoUTF16([]u16, buf2, []const u8, wtf2.latin1Slice()).written, + false => b: { + @memcpy(buf2[0..wtf2.length()], wtf2.utf16Slice()); + break :b wtf2.length(); + }, + }; + buf2[len2] = 0; + break :str buf2[0..len2 :0].ptr; + } else null; + _ = bun.c.SetEnvironmentVariableW(buf1[0..len1 :0].ptr, str2); +} + +comptime { + if (Environment.export_cpp_apis and Environment.isWindows) { + @export(&Bun__Process__editWindowsEnvVar, .{ .name = 
"Bun__Process__editWindowsEnvVar" }); + } +} + +pub export const Bun__version: [*:0]const u8 = "v" ++ bun.Global.package_json_version; +pub export const Bun__version_with_sha: [*:0]const u8 = "v" ++ bun.Global.package_json_version_with_sha; +pub export const Bun__versions_boringssl: [*:0]const u8 = bun.Global.versions.boringssl; +pub export const Bun__versions_libarchive: [*:0]const u8 = bun.Global.versions.libarchive; +pub export const Bun__versions_mimalloc: [*:0]const u8 = bun.Global.versions.mimalloc; +pub export const Bun__versions_picohttpparser: [*:0]const u8 = bun.Global.versions.picohttpparser; +pub export const Bun__versions_uws: [*:0]const u8 = bun.Environment.git_sha; +pub export const Bun__versions_webkit: [*:0]const u8 = bun.Global.versions.webkit; +pub export const Bun__versions_zig: [*:0]const u8 = bun.Global.versions.zig; +pub export const Bun__versions_zlib: [*:0]const u8 = bun.Global.versions.zlib; +pub export const Bun__versions_tinycc: [*:0]const u8 = bun.Global.versions.tinycc; +pub export const Bun__versions_lolhtml: [*:0]const u8 = bun.Global.versions.lolhtml; +pub export const Bun__versions_c_ares: [*:0]const u8 = bun.Global.versions.c_ares; +pub export const Bun__versions_libdeflate: [*:0]const u8 = bun.Global.versions.libdeflate; +pub export const Bun__versions_usockets: [*:0]const u8 = bun.Environment.git_sha; +pub export const Bun__version_sha: [*:0]const u8 = bun.Environment.git_sha; +pub export const Bun__versions_lshpack: [*:0]const u8 = bun.Global.versions.lshpack; +pub export const Bun__versions_zstd: [*:0]const u8 = bun.Global.versions.zstd; + +const std = @import("std"); +const Environment = bun.Environment; +const bun = @import("bun"); +const JSC = bun.JSC; +const JSGlobalObject = JSC.JSGlobalObject; +const JSValue = JSC.JSValue; +const ZigString = JSC.ZigString; +const Syscall = bun.sys; +const strings = bun.strings; diff --git a/src/bun.js/node/node_util_binding.zig b/src/bun.js/node/node_util_binding.zig index 2d709d60d4..3946e8f5a4 100644 --- a/src/bun.js/node/node_util_binding.zig +++ b/src/bun.js/node/node_util_binding.zig @@ -36,85 +36,85 @@ pub fn internalErrorName(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFr if (err_int == -3013) return bun.String.static("EAI_BADHINTS").toJS(globalThis); if (err_int == -3014) return bun.String.static("EAI_PROTOCOL").toJS(globalThis); - if (err_int == -bun.C.UV_E2BIG) return bun.String.static("E2BIG").toJS(globalThis); - if (err_int == -bun.C.UV_EACCES) return bun.String.static("EACCES").toJS(globalThis); - if (err_int == -bun.C.UV_EADDRINUSE) return bun.String.static("EADDRINUSE").toJS(globalThis); - if (err_int == -bun.C.UV_EADDRNOTAVAIL) return bun.String.static("EADDRNOTAVAIL").toJS(globalThis); - if (err_int == -bun.C.UV_EAFNOSUPPORT) return bun.String.static("EAFNOSUPPORT").toJS(globalThis); - if (err_int == -bun.C.UV_EAGAIN) return bun.String.static("EAGAIN").toJS(globalThis); - if (err_int == -bun.C.UV_EALREADY) return bun.String.static("EALREADY").toJS(globalThis); - if (err_int == -bun.C.UV_EBADF) return bun.String.static("EBADF").toJS(globalThis); - if (err_int == -bun.C.UV_EBUSY) return bun.String.static("EBUSY").toJS(globalThis); - if (err_int == -bun.C.UV_ECANCELED) return bun.String.static("ECANCELED").toJS(globalThis); - if (err_int == -bun.C.UV_ECHARSET) return bun.String.static("ECHARSET").toJS(globalThis); - if (err_int == -bun.C.UV_ECONNABORTED) return bun.String.static("ECONNABORTED").toJS(globalThis); - if (err_int == -bun.C.UV_ECONNREFUSED) return 
bun.String.static("ECONNREFUSED").toJS(globalThis); - if (err_int == -bun.C.UV_ECONNRESET) return bun.String.static("ECONNRESET").toJS(globalThis); - if (err_int == -bun.C.UV_EDESTADDRREQ) return bun.String.static("EDESTADDRREQ").toJS(globalThis); - if (err_int == -bun.C.UV_EEXIST) return bun.String.static("EEXIST").toJS(globalThis); - if (err_int == -bun.C.UV_EFAULT) return bun.String.static("EFAULT").toJS(globalThis); - if (err_int == -bun.C.UV_EHOSTUNREACH) return bun.String.static("EHOSTUNREACH").toJS(globalThis); - if (err_int == -bun.C.UV_EINTR) return bun.String.static("EINTR").toJS(globalThis); - if (err_int == -bun.C.UV_EINVAL) return bun.String.static("EINVAL").toJS(globalThis); - if (err_int == -bun.C.UV_EIO) return bun.String.static("EIO").toJS(globalThis); - if (err_int == -bun.C.UV_EISCONN) return bun.String.static("EISCONN").toJS(globalThis); - if (err_int == -bun.C.UV_EISDIR) return bun.String.static("EISDIR").toJS(globalThis); - if (err_int == -bun.C.UV_ELOOP) return bun.String.static("ELOOP").toJS(globalThis); - if (err_int == -bun.C.UV_EMFILE) return bun.String.static("EMFILE").toJS(globalThis); - if (err_int == -bun.C.UV_EMSGSIZE) return bun.String.static("EMSGSIZE").toJS(globalThis); - if (err_int == -bun.C.UV_ENAMETOOLONG) return bun.String.static("ENAMETOOLONG").toJS(globalThis); - if (err_int == -bun.C.UV_ENETDOWN) return bun.String.static("ENETDOWN").toJS(globalThis); - if (err_int == -bun.C.UV_ENETUNREACH) return bun.String.static("ENETUNREACH").toJS(globalThis); - if (err_int == -bun.C.UV_ENFILE) return bun.String.static("ENFILE").toJS(globalThis); - if (err_int == -bun.C.UV_ENOBUFS) return bun.String.static("ENOBUFS").toJS(globalThis); - if (err_int == -bun.C.UV_ENODEV) return bun.String.static("ENODEV").toJS(globalThis); - if (err_int == -bun.C.UV_ENOENT) return bun.String.static("ENOENT").toJS(globalThis); - if (err_int == -bun.C.UV_ENOMEM) return bun.String.static("ENOMEM").toJS(globalThis); - if (err_int == -bun.C.UV_ENONET) return bun.String.static("ENONET").toJS(globalThis); - if (err_int == -bun.C.UV_ENOSPC) return bun.String.static("ENOSPC").toJS(globalThis); - if (err_int == -bun.C.UV_ENOSYS) return bun.String.static("ENOSYS").toJS(globalThis); - if (err_int == -bun.C.UV_ENOTCONN) return bun.String.static("ENOTCONN").toJS(globalThis); - if (err_int == -bun.C.UV_ENOTDIR) return bun.String.static("ENOTDIR").toJS(globalThis); - if (err_int == -bun.C.UV_ENOTEMPTY) return bun.String.static("ENOTEMPTY").toJS(globalThis); - if (err_int == -bun.C.UV_ENOTSOCK) return bun.String.static("ENOTSOCK").toJS(globalThis); - if (err_int == -bun.C.UV_ENOTSUP) return bun.String.static("ENOTSUP").toJS(globalThis); - if (err_int == -bun.C.UV_EPERM) return bun.String.static("EPERM").toJS(globalThis); - if (err_int == -bun.C.UV_EPIPE) return bun.String.static("EPIPE").toJS(globalThis); - if (err_int == -bun.C.UV_EPROTO) return bun.String.static("EPROTO").toJS(globalThis); - if (err_int == -bun.C.UV_EPROTONOSUPPORT) return bun.String.static("EPROTONOSUPPORT").toJS(globalThis); - if (err_int == -bun.C.UV_EPROTOTYPE) return bun.String.static("EPROTOTYPE").toJS(globalThis); - if (err_int == -bun.C.UV_EROFS) return bun.String.static("EROFS").toJS(globalThis); - if (err_int == -bun.C.UV_ESHUTDOWN) return bun.String.static("ESHUTDOWN").toJS(globalThis); - if (err_int == -bun.C.UV_ESPIPE) return bun.String.static("ESPIPE").toJS(globalThis); - if (err_int == -bun.C.UV_ESRCH) return bun.String.static("ESRCH").toJS(globalThis); - if (err_int == -bun.C.UV_ETIMEDOUT) return 
bun.String.static("ETIMEDOUT").toJS(globalThis); - if (err_int == -bun.C.UV_ETXTBSY) return bun.String.static("ETXTBSY").toJS(globalThis); - if (err_int == -bun.C.UV_EXDEV) return bun.String.static("EXDEV").toJS(globalThis); - if (err_int == -bun.C.UV_EFBIG) return bun.String.static("EFBIG").toJS(globalThis); - if (err_int == -bun.C.UV_ENOPROTOOPT) return bun.String.static("ENOPROTOOPT").toJS(globalThis); - if (err_int == -bun.C.UV_ERANGE) return bun.String.static("ERANGE").toJS(globalThis); - if (err_int == -bun.C.UV_ENXIO) return bun.String.static("ENXIO").toJS(globalThis); - if (err_int == -bun.C.UV_EMLINK) return bun.String.static("EMLINK").toJS(globalThis); - if (err_int == -bun.C.UV_EHOSTDOWN) return bun.String.static("EHOSTDOWN").toJS(globalThis); - if (err_int == -bun.C.UV_EREMOTEIO) return bun.String.static("EREMOTEIO").toJS(globalThis); - if (err_int == -bun.C.UV_ENOTTY) return bun.String.static("ENOTTY").toJS(globalThis); - if (err_int == -bun.C.UV_EFTYPE) return bun.String.static("EFTYPE").toJS(globalThis); - if (err_int == -bun.C.UV_EILSEQ) return bun.String.static("EILSEQ").toJS(globalThis); - if (err_int == -bun.C.UV_EOVERFLOW) return bun.String.static("EOVERFLOW").toJS(globalThis); - if (err_int == -bun.C.UV_ESOCKTNOSUPPORT) return bun.String.static("ESOCKTNOSUPPORT").toJS(globalThis); - if (err_int == -bun.C.UV_ENODATA) return bun.String.static("ENODATA").toJS(globalThis); - if (err_int == -bun.C.UV_EUNATCH) return bun.String.static("EUNATCH").toJS(globalThis); + if (err_int == -bun.sys.UV_E.@"2BIG") return bun.String.static("E2BIG").toJS(globalThis); + if (err_int == -bun.sys.UV_E.ACCES) return bun.String.static("EACCES").toJS(globalThis); + if (err_int == -bun.sys.UV_E.ADDRINUSE) return bun.String.static("EADDRINUSE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.ADDRNOTAVAIL) return bun.String.static("EADDRNOTAVAIL").toJS(globalThis); + if (err_int == -bun.sys.UV_E.AFNOSUPPORT) return bun.String.static("EAFNOSUPPORT").toJS(globalThis); + if (err_int == -bun.sys.UV_E.AGAIN) return bun.String.static("EAGAIN").toJS(globalThis); + if (err_int == -bun.sys.UV_E.ALREADY) return bun.String.static("EALREADY").toJS(globalThis); + if (err_int == -bun.sys.UV_E.BADF) return bun.String.static("EBADF").toJS(globalThis); + if (err_int == -bun.sys.UV_E.BUSY) return bun.String.static("EBUSY").toJS(globalThis); + if (err_int == -bun.sys.UV_E.CANCELED) return bun.String.static("ECANCELED").toJS(globalThis); + if (err_int == -bun.sys.UV_E.CHARSET) return bun.String.static("ECHARSET").toJS(globalThis); + if (err_int == -bun.sys.UV_E.CONNABORTED) return bun.String.static("ECONNABORTED").toJS(globalThis); + if (err_int == -bun.sys.UV_E.CONNREFUSED) return bun.String.static("ECONNREFUSED").toJS(globalThis); + if (err_int == -bun.sys.UV_E.CONNRESET) return bun.String.static("ECONNRESET").toJS(globalThis); + if (err_int == -bun.sys.UV_E.DESTADDRREQ) return bun.String.static("EDESTADDRREQ").toJS(globalThis); + if (err_int == -bun.sys.UV_E.EXIST) return bun.String.static("EEXIST").toJS(globalThis); + if (err_int == -bun.sys.UV_E.FAULT) return bun.String.static("EFAULT").toJS(globalThis); + if (err_int == -bun.sys.UV_E.HOSTUNREACH) return bun.String.static("EHOSTUNREACH").toJS(globalThis); + if (err_int == -bun.sys.UV_E.INTR) return bun.String.static("EINTR").toJS(globalThis); + if (err_int == -bun.sys.UV_E.INVAL) return bun.String.static("EINVAL").toJS(globalThis); + if (err_int == -bun.sys.UV_E.IO) return bun.String.static("EIO").toJS(globalThis); + if (err_int == -bun.sys.UV_E.ISCONN) return 
bun.String.static("EISCONN").toJS(globalThis); + if (err_int == -bun.sys.UV_E.ISDIR) return bun.String.static("EISDIR").toJS(globalThis); + if (err_int == -bun.sys.UV_E.LOOP) return bun.String.static("ELOOP").toJS(globalThis); + if (err_int == -bun.sys.UV_E.MFILE) return bun.String.static("EMFILE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.MSGSIZE) return bun.String.static("EMSGSIZE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NAMETOOLONG) return bun.String.static("ENAMETOOLONG").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NETDOWN) return bun.String.static("ENETDOWN").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NETUNREACH) return bun.String.static("ENETUNREACH").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NFILE) return bun.String.static("ENFILE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOBUFS) return bun.String.static("ENOBUFS").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NODEV) return bun.String.static("ENODEV").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOENT) return bun.String.static("ENOENT").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOMEM) return bun.String.static("ENOMEM").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NONET) return bun.String.static("ENONET").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOSPC) return bun.String.static("ENOSPC").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOSYS) return bun.String.static("ENOSYS").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOTCONN) return bun.String.static("ENOTCONN").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOTDIR) return bun.String.static("ENOTDIR").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOTEMPTY) return bun.String.static("ENOTEMPTY").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOTSOCK) return bun.String.static("ENOTSOCK").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOTSUP) return bun.String.static("ENOTSUP").toJS(globalThis); + if (err_int == -bun.sys.UV_E.PERM) return bun.String.static("EPERM").toJS(globalThis); + if (err_int == -bun.sys.UV_E.PIPE) return bun.String.static("EPIPE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.PROTO) return bun.String.static("EPROTO").toJS(globalThis); + if (err_int == -bun.sys.UV_E.PROTONOSUPPORT) return bun.String.static("EPROTONOSUPPORT").toJS(globalThis); + if (err_int == -bun.sys.UV_E.PROTOTYPE) return bun.String.static("EPROTOTYPE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.ROFS) return bun.String.static("EROFS").toJS(globalThis); + if (err_int == -bun.sys.UV_E.SHUTDOWN) return bun.String.static("ESHUTDOWN").toJS(globalThis); + if (err_int == -bun.sys.UV_E.SPIPE) return bun.String.static("ESPIPE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.SRCH) return bun.String.static("ESRCH").toJS(globalThis); + if (err_int == -bun.sys.UV_E.TIMEDOUT) return bun.String.static("ETIMEDOUT").toJS(globalThis); + if (err_int == -bun.sys.UV_E.TXTBSY) return bun.String.static("ETXTBSY").toJS(globalThis); + if (err_int == -bun.sys.UV_E.XDEV) return bun.String.static("EXDEV").toJS(globalThis); + if (err_int == -bun.sys.UV_E.FBIG) return bun.String.static("EFBIG").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOPROTOOPT) return bun.String.static("ENOPROTOOPT").toJS(globalThis); + if (err_int == -bun.sys.UV_E.RANGE) return bun.String.static("ERANGE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NXIO) return bun.String.static("ENXIO").toJS(globalThis); + if (err_int == -bun.sys.UV_E.MLINK) return bun.String.static("EMLINK").toJS(globalThis); + if (err_int == -bun.sys.UV_E.HOSTDOWN) return 
bun.String.static("EHOSTDOWN").toJS(globalThis); + if (err_int == -bun.sys.UV_E.REMOTEIO) return bun.String.static("EREMOTEIO").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NOTTY) return bun.String.static("ENOTTY").toJS(globalThis); + if (err_int == -bun.sys.UV_E.FTYPE) return bun.String.static("EFTYPE").toJS(globalThis); + if (err_int == -bun.sys.UV_E.ILSEQ) return bun.String.static("EILSEQ").toJS(globalThis); + if (err_int == -bun.sys.UV_E.OVERFLOW) return bun.String.static("EOVERFLOW").toJS(globalThis); + if (err_int == -bun.sys.UV_E.SOCKTNOSUPPORT) return bun.String.static("ESOCKTNOSUPPORT").toJS(globalThis); + if (err_int == -bun.sys.UV_E.NODATA) return bun.String.static("ENODATA").toJS(globalThis); + if (err_int == -bun.sys.UV_E.UNATCH) return bun.String.static("EUNATCH").toJS(globalThis); var fmtstring = bun.String.createFormat("Unknown system error {d}", .{err_int}) catch bun.outOfMemory(); return fmtstring.transferToJS(globalThis); } pub fn etimedoutErrorCode(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { - return JSC.JSValue.jsNumberFromInt32(-bun.C.UV_ETIMEDOUT); + return JSC.JSValue.jsNumberFromInt32(-bun.sys.UV_E.TIMEDOUT); } pub fn enobufsErrorCode(_: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { - return JSC.JSValue.jsNumberFromInt32(-bun.C.UV_ENOBUFS); + return JSC.JSValue.jsNumberFromInt32(-bun.sys.UV_E.NOBUFS); } /// `extractedSplitNewLines` for ASCII/Latin1 strings. Panics if passed a non-string. diff --git a/src/bun.js/node/node_zlib_binding.zig b/src/bun.js/node/node_zlib_binding.zig index 1b2fdeb155..9cd417ff77 100644 --- a/src/bun.js/node/node_zlib_binding.zig +++ b/src/bun.js/node/node_zlib_binding.zig @@ -7,6 +7,7 @@ const Output = bun.Output; const ZigString = JSC.ZigString; const validators = @import("./util/validators.zig"); const debug = bun.Output.scoped(.zlib, true); +const Buffer = bun.api.node.Buffer; pub fn crc32(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.arguments_old(2).ptr; @@ -20,7 +21,7 @@ pub fn crc32(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSE if (data.isString()) { break :blk data.asString().toSlice(globalThis, bun.default_allocator); } - const buffer: JSC.Buffer = JSC.Buffer.fromJS(globalThis, data) orelse { + const buffer: Buffer = Buffer.fromJS(globalThis, data) orelse { const ty_str = data.jsTypeString(globalThis).toSlice(globalThis, bun.default_allocator); defer ty_str.deinit(); return globalThis.ERR(.INVALID_ARG_TYPE, "The \"data\" property must be an instance of Buffer, TypedArray, DataView, or ArrayBuffer. 
Received {s}", .{ty_str.slice()}).throw(); diff --git a/src/bun.js/node/path.zig b/src/bun.js/node/path.zig index 31541a44aa..1e51374c49 100644 --- a/src/bun.js/node/path.zig +++ b/src/bun.js/node/path.zig @@ -455,7 +455,7 @@ pub fn basename(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [* const pathZStr = path_ptr.getZigString(globalObject) catch return .zero; if (pathZStr.len == 0) return path_ptr; - var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, JSC.getAllocator(globalObject)); + var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, bun.default_allocator); const allocator = stack_fallback.get(); const pathZSlice = pathZStr.toSlice(allocator); @@ -647,7 +647,7 @@ pub fn dirname(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*] const pathZStr = path_ptr.getZigString(globalObject) catch return .zero; if (pathZStr.len == 0) return toJSString(globalObject, CHAR_STR_DOT); - var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, JSC.getAllocator(globalObject)); + var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, bun.default_allocator); const allocator = stack_fallback.get(); const pathZSlice = pathZStr.toSlice(allocator); @@ -846,7 +846,7 @@ pub fn extname(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*] const pathZStr = path_ptr.getZigString(globalObject) catch return .zero; if (pathZStr.len == 0) return path_ptr; - var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, JSC.getAllocator(globalObject)); + var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, bun.default_allocator); const allocator = stack_fallback.get(); const pathZSlice = pathZStr.toSlice(allocator); @@ -957,7 +957,7 @@ pub fn format(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]J return .zero; }; - var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, JSC.getAllocator(globalObject)); + var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, bun.default_allocator); const allocator = stack_fallback.get(); var root: []const u8 = ""; @@ -1677,7 +1677,7 @@ pub fn normalize(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [ const len = pathZStr.len; if (len == 0) return toJSString(globalObject, CHAR_STR_DOT); - var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, JSC.getAllocator(globalObject)); + var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, bun.default_allocator); const allocator = stack_fallback.get(); const pathZSlice = pathZStr.toSlice(allocator); @@ -2000,7 +2000,7 @@ pub fn parse(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [*]JS const pathZStr = path_ptr.getZigString(globalObject) catch return .zero; if (pathZStr.len == 0) return (PathParsed(u8){}).toJSObject(globalObject); - var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, JSC.getAllocator(globalObject)); + var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, bun.default_allocator); const allocator = stack_fallback.get(); const pathZSlice = pathZStr.toSlice(allocator); @@ -2367,7 +2367,7 @@ pub fn relative(globalObject: *JSC.JSGlobalObject, isWindows: bool, args_ptr: [* const toZigStr = to_ptr.getZigString(globalObject) catch return .zero; if ((fromZigStr.len + toZigStr.len) == 0) return from_ptr; - var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, JSC.getAllocator(globalObject)); + var stack_fallback = 
std.heap.stackFallback(stack_fallback_size_small, bun.default_allocator); const allocator = stack_fallback.get(); var fromZigSlice = fromZigStr.toSlice(allocator); @@ -2956,7 +2956,7 @@ pub fn toNamespacedPath(globalObject: *JSC.JSGlobalObject, isWindows: bool, args const len = pathZStr.len; if (len == 0) return path_ptr; - var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, JSC.getAllocator(globalObject)); + var stack_fallback = std.heap.stackFallback(stack_fallback_size_small, bun.default_allocator); const allocator = stack_fallback.get(); const pathZSlice = pathZStr.toSlice(allocator); diff --git a/src/bun.js/node/path_watcher.zig b/src/bun.js/node/path_watcher.zig index db35bd7026..6c32bb2af0 100644 --- a/src/bun.js/node/path_watcher.zig +++ b/src/bun.js/node/path_watcher.zig @@ -21,7 +21,7 @@ const Semaphore = sync.Semaphore; var default_manager_mutex: Mutex = .{}; var default_manager: ?*PathWatcherManager = null; -const FSWatcher = bun.JSC.Node.FSWatcher; +const FSWatcher = bun.api.node.fs.Watcher; const Event = FSWatcher.Event; const StringOrBytesToDecode = FSWatcher.FSWatchTaskWindows.StringOrBytesToDecode; @@ -93,7 +93,7 @@ pub const PathWatcherManager = struct { .windows => bun.sys.openDirAtWindowsA(bun.FD.cwd(), path, .{ .iterable = true, .read_only = true }), }) { .err => |e| { - if (e.errno == @intFromEnum(bun.C.E.NOTDIR)) { + if (e.errno == @intFromEnum(bun.sys.E.NOTDIR)) { const file = switch (bun.sys.open(path, 0, 0)) { .err => |file_err| return .{ .err = file_err.withPath(path) }, .result => |r| r, @@ -445,11 +445,11 @@ pub const PathWatcherManager = struct { return .{ .err = .{ .errno = @truncate(@intFromEnum(switch (err) { - error.AccessDenied => bun.C.E.ACCES, - error.SystemResources => bun.C.E.NOMEM, + error.AccessDenied => bun.sys.E.ACCES, + error.SystemResources => bun.sys.E.NOMEM, error.Unexpected, error.InvalidUtf8, - => bun.C.E.INVAL, + => bun.sys.E.INVAL, })), .syscall = .watch, }, @@ -478,7 +478,7 @@ pub const PathWatcherManager = struct { manager._decrementPathRef(entry_path_z); return switch (err) { error.OutOfMemory => .{ .err = .{ - .errno = @truncate(@intFromEnum(bun.C.E.NOMEM)), + .errno = @truncate(@intFromEnum(bun.sys.E.NOMEM)), .syscall = .watch, } }, }; @@ -551,8 +551,8 @@ pub const PathWatcherManager = struct { .result = DirectoryRegisterTask.schedule(this, watcher, path) catch |err| return .{ .err = .{ .errno = @truncate(@intFromEnum(switch (err) { - error.OutOfMemory => bun.C.E.NOMEM, - error.UnexpectedFailure => bun.C.E.INVAL, + error.OutOfMemory => bun.sys.E.NOMEM, + error.UnexpectedFailure => bun.sys.E.INVAL, })), }, }, @@ -946,16 +946,16 @@ pub fn watch( default_manager = PathWatcherManager.init(vm) catch |e| { return .{ .err = .{ .errno = @truncate(@intFromEnum(switch (e) { - error.SystemResources, error.LockedMemoryLimitExceeded, error.OutOfMemory => bun.C.E.NOMEM, + error.SystemResources, error.LockedMemoryLimitExceeded, error.OutOfMemory => bun.sys.E.NOMEM, error.ProcessFdQuotaExceeded, error.SystemFdQuotaExceeded, error.ThreadQuotaExceeded, - => bun.C.E.MFILE, + => bun.sys.E.MFILE, - error.Unexpected => bun.C.E.NOMEM, + error.Unexpected => bun.sys.E.NOMEM, - error.KQueueError => bun.C.E.INVAL, + error.KQueueError => bun.sys.E.INVAL, })), .syscall = .watch, } }; @@ -986,29 +986,29 @@ pub fn watch( error.BadPathName, error.InvalidUtf8, error.InvalidWtf8, - => bun.C.E.INVAL, + => bun.sys.E.INVAL, error.OutOfMemory, error.SystemResources, - => bun.C.E.NOMEM, + => bun.sys.E.NOMEM, error.FileNotFound, error.NetworkNotFound, 
error.NoDevice, - => bun.C.E.NOENT, + => bun.sys.E.NOENT, - error.DeviceBusy => bun.C.E.BUSY, - error.AccessDenied => bun.C.E.PERM, - error.InvalidHandle => bun.C.E.BADF, - error.SymLinkLoop => bun.C.E.LOOP, - error.NotDir => bun.C.E.NOTDIR, + error.DeviceBusy => bun.sys.E.BUSY, + error.AccessDenied => bun.sys.E.PERM, + error.InvalidHandle => bun.sys.E.BADF, + error.SymLinkLoop => bun.sys.E.LOOP, + error.NotDir => bun.sys.E.NOTDIR, error.ProcessFdQuotaExceeded, error.SystemFdQuotaExceeded, error.UserResourceLimitReached, - => bun.C.E.MFILE, + => bun.sys.E.MFILE, - else => bun.C.E.INVAL, + else => bun.sys.E.INVAL, })), .syscall = .watch, } }; diff --git a/src/bun.js/node/time_like.zig b/src/bun.js/node/time_like.zig new file mode 100644 index 0000000000..9c0d5d3f41 --- /dev/null +++ b/src/bun.js/node/time_like.zig @@ -0,0 +1,105 @@ +/// On windows, this is what libuv expects +/// On unix it is what the utimens api expects +pub const TimeLike = if (Environment.isWindows) f64 else std.posix.timespec; + +// Equivalent to `toUnixTimestamp` +// +// Node.js docs: +// > Values can be either numbers representing Unix epoch time in seconds, Dates, or a numeric string like '123456789.0'. +// > If the value can not be converted to a number, or is NaN, Infinity, or -Infinity, an Error will be thrown. +pub fn fromJS(globalObject: *JSGlobalObject, value: JSValue) ?TimeLike { + // Number is most common case + if (value.isNumber()) { + const seconds = value.asNumber(); + if (std.math.isFinite(seconds)) { + if (seconds < 0) { + return fromNow(); + } + return fromSeconds(seconds); + } + return null; + } else switch (value.jsType()) { + .JSDate => { + const milliseconds = value.getUnixTimestamp(); + if (std.math.isFinite(milliseconds)) { + return fromMilliseconds(milliseconds); + } + }, + .String => { + const seconds = value.coerceToDouble(globalObject); + if (std.math.isFinite(seconds)) { + return fromSeconds(seconds); + } + }, + else => {}, + } + return null; +} + +fn fromSeconds(seconds: f64) TimeLike { + if (Environment.isWindows) { + return seconds; + } + return .{ + .sec = @intFromFloat(seconds), + .nsec = @intFromFloat(@mod(seconds, 1) * std.time.ns_per_s), + }; +} + +fn fromMilliseconds(milliseconds: f64) TimeLike { + if (Environment.isWindows) { + return milliseconds / 1000.0; + } + + var sec: f64 = @divFloor(milliseconds, std.time.ms_per_s); + var nsec: f64 = @mod(milliseconds, std.time.ms_per_s) * std.time.ns_per_ms; + + if (nsec < 0) { + nsec += std.time.ns_per_s; + sec -= 1; + } + + return .{ + .sec = @intFromFloat(sec), + .nsec = @intFromFloat(nsec), + }; +} + +fn fromNow() TimeLike { + if (Environment.isWindows) { + const nanos = std.time.nanoTimestamp(); + return @as(TimeLike, @floatFromInt(nanos)) / std.time.ns_per_s; + } + + // Permissions requirements + // To set both file timestamps to the current time (i.e., times is + // NULL, or both tv_nsec fields specify UTIME_NOW), either: + // + // • the caller must have write access to the file; + // + // • the caller's effective user ID must match the owner of the + // file; or + // + // • the caller must have appropriate privileges. + // + // To make any change other than setting both timestamps to the + // current time (i.e., times is not NULL, and neither tv_nsec field + // is UTIME_NOW and neither tv_nsec field is UTIME_OMIT), either + // condition 2 or 3 above must apply. 
+ // + // If both tv_nsec fields are specified as UTIME_OMIT, then no file + // ownership or permission checks are performed, and the file + // timestamps are not modified, but other error conditions may still + return .{ + .sec = 0, + .nsec = if (Environment.isLinux) std.os.linux.UTIME.NOW else bun.c.UTIME_NOW, + }; +} + +const std = @import("std"); +const bun = @import("bun"); +const sys = bun.sys; +const Environment = bun.Environment; +const jsc = bun.jsc; +const JSGlobalObject = jsc.JSGlobalObject; +const JSValue = jsc.JSValue; diff --git a/src/bun.js/node/types.zig b/src/bun.js/node/types.zig index 0b650760d1..d231d6211f 100644 --- a/src/bun.js/node/types.zig +++ b/src/bun.js/node/types.zig @@ -11,7 +11,6 @@ const path_handler = bun.path; const strings = bun.strings; const string = bun.string; -const C = bun.C; const L = strings.literal; const Environment = bun.Environment; const Fs = @import("../../fs.zig"); @@ -21,322 +20,10 @@ const Mode = bun.Mode; const Syscall = bun.sys; const URL = @import("../../url.zig").URL; const Value = std.json.Value; -pub const validators = @import("./util/validators.zig"); const JSError = bun.JSError; -pub const Path = @import("./path.zig"); - -fn typeBaseNameT(comptime T: type) []const u8 { - return meta.typeBaseName(@typeName(T)); -} - -pub const Buffer = JSC.MarkedArrayBuffer; - -/// On windows, this is what libuv expects -/// On unix it is what the utimens api expects -pub const TimeLike = if (Environment.isWindows) f64 else std.posix.timespec; - -/// Node.js expects the error to include contextual information -/// - "syscall" -/// - "path" -/// - "errno" -pub fn Maybe(comptime ReturnTypeT: type, comptime ErrorTypeT: type) type { - // can't call @hasDecl on void, anyerror, etc - const has_any_decls = ErrorTypeT != void and ErrorTypeT != anyerror; - const has_retry = has_any_decls and @hasDecl(ErrorTypeT, "retry"); - const has_todo = has_any_decls and @hasDecl(ErrorTypeT, "todo"); - - return union(Tag) { - pub const ErrorType = ErrorTypeT; - pub const ReturnType = ReturnTypeT; - - err: ErrorType, - result: ReturnType, - - /// NOTE: this has to have a well defined layout (e.g. setting to `u8`) - /// experienced a bug with a Maybe(void, void) - /// creating the `err` variant of this type - /// resulted in Zig incorrectly setting the tag, leading to a switch - /// statement to just not work. - /// we (Zack, Dylan, Chloe, Mason) observed that it was set to 0xFF in ReleaseFast in the debugger - pub const Tag = enum(u8) { err, result }; - - pub const retry: @This() = if (has_retry) .{ .err = ErrorType.retry } else .{ .err = .{} }; - pub const success: @This() = .{ - .result = std.mem.zeroes(ReturnType), - }; - /// This value is technically garbage, but that is okay as `.aborted` is - /// only meant to be returned in an operation when there is an aborted - /// `AbortSignal` object associated with the operation. 
- pub const aborted: @This() = .{ .err = .{ - .errno = @intFromEnum(posix.E.INTR), - .syscall = .access, - } }; - - pub fn assert(this: @This()) ReturnType { - switch (this) { - .err => |err| { - bun.Output.panic("Unexpected error\n{}", .{err}); - }, - .result => |result| return result, - } - } - - pub inline fn todo() @This() { - if (Environment.allow_assert) { - if (comptime ReturnType == void) { - @panic("TODO called!"); - } - @panic(comptime "TODO: Maybe(" ++ typeBaseNameT(ReturnType) ++ ")"); - } - if (has_todo) { - return .{ .err = ErrorType.todo() }; - } - return .{ .err = ErrorType{} }; - } - - pub fn isTrue(this: @This()) bool { - if (comptime ReturnType != bool) @compileError("This function can only be called on bool"); - return switch (this) { - .result => |r| r, - else => false, - }; - } - - pub fn unwrap(this: @This()) !ReturnType { - return switch (this) { - .result => |r| r, - .err => |e| bun.errnoToZigErr(e.errno), - }; - } - - /// Unwrap the value if it is `result` or use the provided `default_value` - pub inline fn unwrapOr(this: @This(), default_value: ReturnType) ReturnType { - return switch (this) { - .result => |v| v, - .err => default_value, - }; - } - - pub inline fn initErr(e: ErrorType) Maybe(ReturnType, ErrorType) { - return .{ .err = e }; - } - - pub inline fn initErrWithP(e: C.SystemErrno, syscall: Syscall.Tag, path: anytype) Maybe(ReturnType, ErrorType) { - return .{ .err = .{ - .errno = @intFromEnum(e), - .syscall = syscall, - .path = path, - } }; - } - - pub inline fn asErr(this: *const @This()) ?ErrorType { - if (this.* == .err) return this.err; - return null; - } - - pub inline fn asValue(this: *const @This()) ?ReturnType { - if (this.* == .result) return this.result; - return null; - } - - pub inline fn isOk(this: *const @This()) bool { - return switch (this.*) { - .result => true, - .err => false, - }; - } - - pub inline fn isErr(this: *const @This()) bool { - return switch (this.*) { - .result => false, - .err => true, - }; - } - - pub inline fn initResult(result: ReturnType) Maybe(ReturnType, ErrorType) { - return .{ .result = result }; - } - - pub inline fn mapErr(this: @This(), comptime E: type, err_fn: *const fn (ErrorTypeT) E) Maybe(ReturnType, E) { - return switch (this) { - .result => |v| .{ .result = v }, - .err => |e| .{ .err = err_fn(e) }, - }; - } - - pub inline fn toCssResult(this: @This()) Maybe(ReturnType, bun.css.ParseError(bun.css.ParserError)) { - return switch (ErrorTypeT) { - bun.css.BasicParseError => { - return switch (this) { - .result => |v| return .{ .result = v }, - .err => |e| return .{ .err = e.intoDefaultParseError() }, - }; - }, - bun.css.ParseError(bun.css.ParserError) => @compileError("Already a ParseError(ParserError)"), - else => @compileError("Bad!"), - }; - } - - pub fn toJS(this: @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { - return switch (this) { - .result => |r| switch (ReturnType) { - JSC.JSValue => r, - - void => .undefined, - bool => JSC.JSValue.jsBoolean(r), - - JSC.ArrayBuffer => r.toJS(globalObject, null), - []u8 => JSC.ArrayBuffer.fromBytes(r, .ArrayBuffer).toJS(globalObject, null), - - else => switch (@typeInfo(ReturnType)) { - .int, .float, .comptime_int, .comptime_float => JSC.JSValue.jsNumber(r), - .@"struct", .@"enum", .@"opaque", .@"union" => r.toJS(globalObject), - .pointer => { - if (bun.trait.isZigString(ReturnType)) - JSC.ZigString.init(bun.asByteSlice(r)).withEncoding().toJS(globalObject); - - return r.toJS(globalObject); - }, - }, - }, - .err => |e| e.toJSC(globalObject), - }; - } - 
- pub fn toArrayBuffer(this: @This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { - return switch (this) { - .result => |r| JSC.ArrayBuffer.fromBytes(r, .ArrayBuffer).toJS(globalObject, null), - .err => |e| e.toJSC(globalObject), - }; - } - - pub fn getErrno(this: @This()) posix.E { - return switch (this) { - .result => posix.E.SUCCESS, - .err => |e| @enumFromInt(e.errno), - }; - } - - pub fn errnoSys(rc: anytype, syscall: Syscall.Tag) ?@This() { - if (comptime Environment.isWindows) { - if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { - if (rc != 0) return null; - } - } - return switch (Syscall.getErrno(rc)) { - .SUCCESS => null, - else => |e| @This(){ - // always truncate - .err = .{ - .errno = translateToErrInt(e), - .syscall = syscall, - }, - }, - }; - } - - pub fn errno(err: anytype, syscall: Syscall.Tag) @This() { - return @This(){ - // always truncate - .err = .{ - .errno = translateToErrInt(err), - .syscall = syscall, - }, - }; - } - - pub fn errnoSysFd(rc: anytype, syscall: Syscall.Tag, fd: bun.FileDescriptor) ?@This() { - if (comptime Environment.isWindows) { - if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { - if (rc != 0) return null; - } - } - return switch (Syscall.getErrno(rc)) { - .SUCCESS => null, - else => |e| @This(){ - // Always truncate - .err = .{ - .errno = translateToErrInt(e), - .syscall = syscall, - .fd = fd, - }, - }, - }; - } - - pub fn errnoSysP(rc: anytype, syscall: Syscall.Tag, path: anytype) ?@This() { - if (bun.meta.Item(@TypeOf(path)) == u16) { - @compileError("Do not pass WString path to errnoSysP, it needs the path encoded as utf8"); - } - if (comptime Environment.isWindows) { - if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { - if (rc != 0) return null; - } - } - return switch (Syscall.getErrno(rc)) { - .SUCCESS => null, - else => |e| @This(){ - // Always truncate - .err = .{ - .errno = translateToErrInt(e), - .syscall = syscall, - .path = bun.asByteSlice(path), - }, - }, - }; - } - - pub fn errnoSysFP(rc: anytype, syscall: Syscall.Tag, fd: bun.FileDescriptor, path: anytype) ?@This() { - if (comptime Environment.isWindows) { - if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { - if (rc != 0) return null; - } - } - return switch (Syscall.getErrno(rc)) { - .SUCCESS => null, - else => |e| @This(){ - // Always truncate - .err = .{ - .errno = translateToErrInt(e), - .syscall = syscall, - .fd = fd, - .path = bun.asByteSlice(path), - }, - }, - }; - } - - pub fn errnoSysPD(rc: anytype, syscall: Syscall.Tag, path: anytype, dest: anytype) ?@This() { - if (bun.meta.Item(@TypeOf(path)) == u16) { - @compileError("Do not pass WString path to errnoSysPD, it needs the path encoded as utf8"); - } - if (comptime Environment.isWindows) { - if (comptime @TypeOf(rc) == std.os.windows.NTSTATUS) {} else { - if (rc != 0) return null; - } - } - return switch (Syscall.getErrno(rc)) { - .SUCCESS => null, - else => |e| @This(){ - // Always truncate - .err = .{ - .errno = translateToErrInt(e), - .syscall = syscall, - .path = bun.asByteSlice(path), - .dest = bun.asByteSlice(dest), - }, - }, - }; - } - }; -} - -fn translateToErrInt(err: anytype) bun.sys.Error.Int { - return switch (@TypeOf(err)) { - bun.windows.NTSTATUS => @intFromEnum(bun.windows.translateNTStatusToErrno(err)), - else => @truncate(@intFromEnum(err)), - }; -} +const node = bun.api.node; +const Buffer = node.Buffer; +const ArgumentsSlice = JSC.CallFrame.ArgumentsSlice; pub const BlobOrStringOrBuffer = union(enum) { blob: JSC.WebCore.Blob, @@ -651,13 +338,8 @@ 
pub const StringOrBuffer = union(enum) { } }; -pub const ErrorCode = @import("./nodejs_error_code.zig").Code; - -// We can't really use Zig's error handling for syscalls because Node.js expects the "real" errno to be returned -// and various issues with std.posix that make it too unstable for arbitrary user input (e.g. how .BADF is marked as unreachable) - /// https://github.com/nodejs/node/blob/master/lib/buffer.js#L587 -/// See `JSC.WebCore.Encoder` for encoding and decoding functions. +/// See `JSC.WebCore.encoding` for encoding and decoding functions. /// must match src/bun.js/bindings/BufferEncodingType.h pub const Encoding = enum(u8) { utf8, @@ -752,7 +434,7 @@ pub const Encoding = enum(u8) { return JSC.ArrayBuffer.createBuffer(globalObject, input); }, inline else => |enc| { - const res = JSC.WebCore.Encoder.toStringComptime(input, globalObject, enc); + const res = JSC.WebCore.encoding.toStringComptime(input, globalObject, enc); if (res.isError()) { return globalObject.throwValue(res) catch .zero; } @@ -786,7 +468,7 @@ pub const Encoding = enum(u8) { return JSC.ArrayBuffer.createBuffer(globalObject, input); }, inline else => |enc| { - const res = JSC.WebCore.Encoder.toStringComptime(input, globalObject, enc); + const res = JSC.WebCore.encoding.toStringComptime(input, globalObject, enc); if (res.isError()) { return globalObject.throwValue(res) catch .zero; } @@ -1152,190 +834,9 @@ pub const VectorArrayBuffer = struct { } }; -pub const ArgumentsSlice = struct { - remaining: []const JSC.JSValue, - vm: *JSC.VirtualMachine, - arena: bun.ArenaAllocator = bun.ArenaAllocator.init(bun.default_allocator), - all: []const JSC.JSValue, - threw: bool = false, - protected: bun.bit_set.IntegerBitSet(32) = bun.bit_set.IntegerBitSet(32).initEmpty(), - will_be_async: bool = false, - - pub fn unprotect(this: *ArgumentsSlice) void { - var iter = this.protected.iterator(.{}); - const ctx = this.vm.global; - while (iter.next()) |i| { - JSC.C.JSValueUnprotect(ctx, this.all[i].asObjectRef()); - } - this.protected = bun.bit_set.IntegerBitSet(32).initEmpty(); - } - - pub fn deinit(this: *ArgumentsSlice) void { - this.unprotect(); - this.arena.deinit(); - } - - pub fn protectEat(this: *ArgumentsSlice) void { - if (this.remaining.len == 0) return; - const index = this.all.len - this.remaining.len; - this.protected.set(index); - JSC.C.JSValueProtect(this.vm.global, this.all[index].asObjectRef()); - this.eat(); - } - - pub fn protectEatNext(this: *ArgumentsSlice) ?JSC.JSValue { - if (this.remaining.len == 0) return null; - return this.nextEat(); - } - - pub fn from(vm: *JSC.VirtualMachine, arguments: []const JSC.JSValueRef) ArgumentsSlice { - return init(vm, @as([*]const JSC.JSValue, @ptrCast(arguments.ptr))[0..arguments.len]); - } - pub fn init(vm: *JSC.VirtualMachine, arguments: []const JSC.JSValue) ArgumentsSlice { - return ArgumentsSlice{ - .remaining = arguments, - .vm = vm, - .all = arguments, - .arena = bun.ArenaAllocator.init(vm.allocator), - }; - } - - pub fn initAsync(vm: *JSC.VirtualMachine, arguments: []const JSC.JSValue) ArgumentsSlice { - return ArgumentsSlice{ - .remaining = bun.default_allocator.dupe(JSC.JSValue, arguments), - .vm = vm, - .all = arguments, - .arena = bun.ArenaAllocator.init(bun.default_allocator), - }; - } - - pub inline fn len(this: *const ArgumentsSlice) u16 { - return @as(u16, @truncate(this.remaining.len)); - } - pub fn eat(this: *ArgumentsSlice) void { - if (this.remaining.len == 0) { - return; - } - - this.remaining = this.remaining[1..]; - } - - /// Peek the next argument 
without eating it - pub fn next(this: *ArgumentsSlice) ?JSC.JSValue { - if (this.remaining.len == 0) { - return null; - } - - return this.remaining[0]; - } - - pub fn nextEat(this: *ArgumentsSlice) ?JSC.JSValue { - if (this.remaining.len == 0) { - return null; - } - defer this.eat(); - return this.remaining[0]; - } -}; - -// Equivalent to `toUnixTimestamp` -// -// Node.js docs: -// > Values can be either numbers representing Unix epoch time in seconds, Dates, or a numeric string like '123456789.0'. -// > If the value can not be converted to a number, or is NaN, Infinity, or -Infinity, an Error will be thrown. -pub fn timeLikeFromJS(globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) ?TimeLike { - // Number is most common case - if (value.isNumber()) { - const seconds = value.asNumber(); - if (std.math.isFinite(seconds)) { - if (seconds < 0) { - return timeLikeFromNow(); - } - return timeLikeFromSeconds(seconds); - } - return null; - } else switch (value.jsType()) { - .JSDate => { - const milliseconds = value.getUnixTimestamp(); - if (std.math.isFinite(milliseconds)) { - return timeLikeFromMilliseconds(milliseconds); - } - }, - .String => { - const seconds = value.coerceToDouble(globalObject); - if (std.math.isFinite(seconds)) { - return timeLikeFromSeconds(seconds); - } - }, - else => {}, - } - return null; -} - -fn timeLikeFromSeconds(seconds: f64) TimeLike { - if (Environment.isWindows) { - return seconds; - } - return .{ - .sec = @intFromFloat(seconds), - .nsec = @intFromFloat(@mod(seconds, 1) * std.time.ns_per_s), - }; -} - -fn timeLikeFromMilliseconds(milliseconds: f64) TimeLike { - if (Environment.isWindows) { - return milliseconds / 1000.0; - } - - var sec: f64 = @divFloor(milliseconds, std.time.ms_per_s); - var nsec: f64 = @mod(milliseconds, std.time.ms_per_s) * std.time.ns_per_ms; - - if (nsec < 0) { - nsec += std.time.ns_per_s; - sec -= 1; - } - - return .{ - .sec = @intFromFloat(sec), - .nsec = @intFromFloat(nsec), - }; -} - -fn timeLikeFromNow() TimeLike { - if (Environment.isWindows) { - const nanos = std.time.nanoTimestamp(); - return @as(TimeLike, @floatFromInt(nanos)) / std.time.ns_per_s; - } - - // Permissions requirements - // To set both file timestamps to the current time (i.e., times is - // NULL, or both tv_nsec fields specify UTIME_NOW), either: - // - // • the caller must have write access to the file; - // - // • the caller's effective user ID must match the owner of the - // file; or - // - // • the caller must have appropriate privileges. - // - // To make any change other than setting both timestamps to the - // current time (i.e., times is not NULL, and neither tv_nsec field - // is UTIME_NOW and neither tv_nsec field is UTIME_OMIT), either - // condition 2 or 3 above must apply. 
- // - // If both tv_nsec fields are specified as UTIME_OMIT, then no file - // ownership or permission checks are performed, and the file - // timestamps are not modified, but other error conditions may still - return .{ - .sec = 0, - .nsec = if (Environment.isLinux) std.os.linux.UTIME.NOW else bun.c.UTIME_NOW, - }; -} - pub fn modeFromJS(ctx: *JSC.JSGlobalObject, value: JSC.JSValue) bun.JSError!?Mode { const mode_int = if (value.isNumber()) brk: { - const m = try validators.validateUint32(ctx, value, "mode", .{}, false); - break :brk @as(Mode, @truncate(m)); + break :brk try node.validators.validateUint32(ctx, value, "mode", .{}, false); } else brk: { if (value.isUndefinedOrNull()) return null; @@ -1363,7 +864,7 @@ pub fn modeFromJS(ctx: *JSC.JSGlobalObject, value: JSC.JSValue) bun.JSError!?Mod }; }; - return mode_int & 0o777; + return @truncate(mode_int & 0o777); } pub const PathOrFileDescriptor = union(Tag) { @@ -1670,300 +1171,13 @@ pub const Dirent = struct { } }; -pub const Process = struct { - pub fn getArgv0(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { - return JSC.ZigString.fromUTF8(bun.argv[0]).toJS(globalObject); - } - - pub fn getExecPath(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { - const out = bun.selfExePath() catch { - // if for any reason we are unable to get the executable path, we just return argv[0] - return getArgv0(globalObject); - }; - - return JSC.ZigString.fromUTF8(out).toJS(globalObject); - } - - pub fn getExecArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { - var sfb = std.heap.stackFallback(4096, globalObject.allocator()); - const temp_alloc = sfb.get(); - const vm = globalObject.bunVM(); - - if (vm.worker) |worker| { - // was explicitly overridden for the worker? - if (worker.execArgv) |execArgv| { - const array = JSC.JSValue.createEmptyArray(globalObject, execArgv.len); - for (0..execArgv.len) |i| { - array.putIndex(globalObject, @intCast(i), bun.String.init(execArgv[i]).toJS(globalObject)); - } - return array; - } - } - - var args = std.ArrayList(bun.String).initCapacity(temp_alloc, bun.argv.len - 1) catch bun.outOfMemory(); - defer args.deinit(); - defer for (args.items) |*arg| arg.deref(); - - var seen_run = false; - var prev: ?[]const u8 = null; - - // we re-parse the process argv to extract execArgv, since this is a very uncommon operation - // it isn't worth doing this as a part of the CLI - for (bun.argv[@min(1, bun.argv.len)..]) |arg| { - defer prev = arg; - - if (arg.len >= 1 and arg[0] == '-') { - args.append(bun.String.createUTF8(arg)) catch bun.outOfMemory(); - continue; - } - - if (!seen_run and bun.strings.eqlComptime(arg, "run")) { - seen_run = true; - continue; - } - - // A set of execArgv args consume an extra argument, so we do not want to - // confuse these with script names. 
- const map = bun.ComptimeStringMap(void, comptime brk: { - const auto_params = bun.CLI.Arguments.auto_params; - const KV = struct { []const u8, void }; - var entries: [auto_params.len]KV = undefined; - var i = 0; - for (auto_params) |param| { - if (param.takes_value != .none) { - if (param.names.long) |name| { - entries[i] = .{ "--" ++ name, {} }; - i += 1; - } - if (param.names.short) |name| { - entries[i] = .{ &[_]u8{ '-', name }, {} }; - i += 1; - } - } - } - - var result: [i]KV = undefined; - @memcpy(&result, entries[0..i]); - break :brk result; - }); - - if (prev) |p| if (map.has(p)) { - args.append(bun.String.createUTF8(arg)) catch @panic("OOM"); - continue; - }; - - // we hit the script name - break; - } - - return bun.String.toJSArray(globalObject, args.items); - } - - pub fn getArgv(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { - const vm = globalObject.bunVM(); - - // Allocate up to 32 strings in stack - var stack_fallback_allocator = std.heap.stackFallback( - 32 * @sizeOf(JSC.ZigString) + (bun.MAX_PATH_BYTES + 1) + 32, - heap_allocator, - ); - const allocator = stack_fallback_allocator.get(); - - var args_count: usize = vm.argv.len; - if (vm.worker) |worker| { - args_count = worker.argv.len; - } - - const args = allocator.alloc( - bun.String, - // argv omits "bun" because it could be "bun run" or "bun" and it's kind of ambiguous - // argv also omits the script name - args_count + 2, - ) catch bun.outOfMemory(); - var args_list: std.ArrayListUnmanaged(bun.String) = .initBuffer(args); - - if (vm.standalone_module_graph != null) { - // Don't break user's code because they did process.argv.slice(2) - // Even if they didn't type "bun", we still want to add it as argv[0] - args_list.appendAssumeCapacity( - bun.String.static("bun"), - ); - } else { - const exe_path = bun.selfExePath() catch null; - args_list.appendAssumeCapacity( - if (exe_path) |str| bun.String.fromUTF8(str) else bun.String.static("bun"), - ); - } - - if (vm.main.len > 0 and - !strings.endsWithComptime(vm.main, bun.pathLiteral("/[eval]")) and - !strings.endsWithComptime(vm.main, bun.pathLiteral("/[stdin]"))) - { - if (vm.worker != null and vm.worker.?.eval_mode) { - args_list.appendAssumeCapacity(bun.String.static("[worker eval]")); - } else { - args_list.appendAssumeCapacity(bun.String.fromUTF8(vm.main)); - } - } - - defer allocator.free(args); - - if (vm.worker) |worker| { - for (worker.argv) |arg| { - args_list.appendAssumeCapacity(bun.String.init(arg)); - } - } else { - for (vm.argv) |arg| { - const str = bun.String.fromUTF8(arg); - // https://github.com/yargs/yargs/blob/adb0d11e02c613af3d9427b3028cc192703a3869/lib/utils/process-argv.ts#L1 - args_list.appendAssumeCapacity(str); - } - } - - return bun.String.toJSArray(globalObject, args_list.items); - } - - pub fn getCwd(globalObject: *JSC.JSGlobalObject) callconv(.C) JSC.JSValue { - return JSC.toJSHostValue(globalObject, getCwd_(globalObject)); - } - fn getCwd_(globalObject: *JSC.JSGlobalObject) bun.JSError!JSC.JSValue { - var buf: bun.PathBuffer = undefined; - switch (Path.getCwd(&buf)) { - .result => |r| return JSC.ZigString.init(r).withEncoding().toJS(globalObject), - .err => |e| { - return globalObject.throwValue(e.toJSC(globalObject)); - }, - } - } - - pub fn setCwd(globalObject: *JSC.JSGlobalObject, to: *JSC.ZigString) callconv(.C) JSC.JSValue { - return JSC.toJSHostValue(globalObject, setCwd_(globalObject, to)); - } - fn setCwd_(globalObject: *JSC.JSGlobalObject, to: *JSC.ZigString) bun.JSError!JSC.JSValue { - if (to.len == 0) { - return 
globalObject.throwInvalidArguments("Expected path to be a non-empty string", .{}); - } - const vm = globalObject.bunVM(); - const fs = vm.transpiler.fs; - - var buf: bun.PathBuffer = undefined; - const slice = to.sliceZBuf(&buf) catch return globalObject.throw("Invalid path", .{}); - - switch (Syscall.chdir(fs.top_level_dir, slice)) { - .result => { - // When we update the cwd from JS, we have to update the bundler's version as well - // However, this might be called many times in a row, so we use a pre-allocated buffer - // that way we don't have to worry about garbage collector - const into_cwd_buf = switch (bun.sys.getcwd(&buf)) { - .result => |r| r, - .err => |err| { - _ = Syscall.chdir(fs.top_level_dir, fs.top_level_dir); - return globalObject.throwValue(err.toJSC(globalObject)); - }, - }; - @memcpy(fs.top_level_dir_buf[0..into_cwd_buf.len], into_cwd_buf); - fs.top_level_dir_buf[into_cwd_buf.len] = 0; - fs.top_level_dir = fs.top_level_dir_buf[0..into_cwd_buf.len :0]; - - const len = fs.top_level_dir.len; - // Ensure the path ends with a slash - if (fs.top_level_dir_buf[len - 1] != std.fs.path.sep) { - fs.top_level_dir_buf[len] = std.fs.path.sep; - fs.top_level_dir_buf[len + 1] = 0; - fs.top_level_dir = fs.top_level_dir_buf[0 .. len + 1 :0]; - } - const withoutTrailingSlash = if (Environment.isWindows) strings.withoutTrailingSlashWindowsPath else strings.withoutTrailingSlash; - var str = bun.String.createUTF8(withoutTrailingSlash(fs.top_level_dir)); - return str.transferToJS(globalObject); - }, - .err => |e| { - return globalObject.throwValue(e.toJSC(globalObject)); - }, - } - } - - // TODO(@190n) this may need to be noreturn - pub fn exit(globalObject: *JSC.JSGlobalObject, code: u8) callconv(.c) void { - var vm = globalObject.bunVM(); - if (vm.worker) |worker| { - vm.exit_handler.exit_code = code; - worker.requestTerminate(); - return; - } - - vm.exit_handler.exit_code = code; - vm.onExit(); - vm.globalExit(); - } - - // TODO: switch this to using *bun.wtf.String when it is added - pub fn Bun__Process__editWindowsEnvVar(k: bun.String, v: bun.String) callconv(.C) void { - comptime bun.assert(bun.Environment.isWindows); - if (k.tag == .Empty) return; - const wtf1 = k.value.WTFStringImpl; - var fixed_stack_allocator = std.heap.stackFallback(1025, bun.default_allocator); - const allocator = fixed_stack_allocator.get(); - var buf1 = allocator.alloc(u16, k.utf16ByteLength() + 1) catch bun.outOfMemory(); - defer allocator.free(buf1); - var buf2 = allocator.alloc(u16, v.utf16ByteLength() + 1) catch bun.outOfMemory(); - defer allocator.free(buf2); - const len1: usize = switch (wtf1.is8Bit()) { - true => bun.strings.copyLatin1IntoUTF16([]u16, buf1, []const u8, wtf1.latin1Slice()).written, - false => b: { - @memcpy(buf1[0..wtf1.length()], wtf1.utf16Slice()); - break :b wtf1.length(); - }, - }; - buf1[len1] = 0; - const str2: ?[*:0]const u16 = if (v.tag != .Dead) str: { - if (v.tag == .Empty) break :str (&[_]u16{0})[0..0 :0]; - const wtf2 = v.value.WTFStringImpl; - const len2: usize = switch (wtf2.is8Bit()) { - true => bun.strings.copyLatin1IntoUTF16([]u16, buf2, []const u8, wtf2.latin1Slice()).written, - false => b: { - @memcpy(buf2[0..wtf2.length()], wtf2.utf16Slice()); - break :b wtf2.length(); - }, - }; - buf2[len2] = 0; - break :str buf2[0..len2 :0].ptr; - } else null; - _ = bun.c.SetEnvironmentVariableW(buf1[0..len1 :0].ptr, str2); - } - - comptime { - if (Environment.export_cpp_apis and Environment.isWindows) { - @export(&Bun__Process__editWindowsEnvVar, .{ .name = 
"Bun__Process__editWindowsEnvVar" }); - } - } - - pub export const Bun__version: [*:0]const u8 = "v" ++ bun.Global.package_json_version; - pub export const Bun__version_with_sha: [*:0]const u8 = "v" ++ bun.Global.package_json_version_with_sha; - pub export const Bun__versions_boringssl: [*:0]const u8 = bun.Global.versions.boringssl; - pub export const Bun__versions_libarchive: [*:0]const u8 = bun.Global.versions.libarchive; - pub export const Bun__versions_mimalloc: [*:0]const u8 = bun.Global.versions.mimalloc; - pub export const Bun__versions_picohttpparser: [*:0]const u8 = bun.Global.versions.picohttpparser; - pub export const Bun__versions_uws: [*:0]const u8 = bun.Environment.git_sha; - pub export const Bun__versions_webkit: [*:0]const u8 = bun.Global.versions.webkit; - pub export const Bun__versions_zig: [*:0]const u8 = bun.Global.versions.zig; - pub export const Bun__versions_zlib: [*:0]const u8 = bun.Global.versions.zlib; - pub export const Bun__versions_tinycc: [*:0]const u8 = bun.Global.versions.tinycc; - pub export const Bun__versions_lolhtml: [*:0]const u8 = bun.Global.versions.lolhtml; - pub export const Bun__versions_c_ares: [*:0]const u8 = bun.Global.versions.c_ares; - pub export const Bun__versions_libdeflate: [*:0]const u8 = bun.Global.versions.libdeflate; - pub export const Bun__versions_usockets: [*:0]const u8 = bun.Environment.git_sha; - pub export const Bun__version_sha: [*:0]const u8 = bun.Environment.git_sha; - pub export const Bun__versions_lshpack: [*:0]const u8 = bun.Global.versions.lshpack; - pub export const Bun__versions_zstd: [*:0]const u8 = bun.Global.versions.zstd; -}; - pub const PathOrBlob = union(enum) { path: JSC.Node.PathOrFileDescriptor, blob: Blob, const Blob = JSC.WebCore.Blob; - pub fn fromJSNoCopy(ctx: *JSC.JSGlobalObject, args: *JSC.Node.ArgumentsSlice) bun.JSError!PathOrBlob { + pub fn fromJSNoCopy(ctx: *JSC.JSGlobalObject, args: *JSC.CallFrame.ArgumentsSlice) bun.JSError!PathOrBlob { if (try JSC.Node.PathOrFileDescriptor.fromJS(ctx, args, bun.default_allocator)) |path| { return PathOrBlob{ .path = path, @@ -1981,16 +1195,3 @@ pub const PathOrBlob = union(enum) { return ctx.throwInvalidArgumentTypeValue("destination", "path, file descriptor, or Blob", arg); } }; - -pub const uid_t = if (Environment.isPosix) std.posix.uid_t else bun.windows.libuv.uv_uid_t; -pub const gid_t = if (Environment.isPosix) std.posix.gid_t else bun.windows.libuv.uv_gid_t; - -const stat = @import("./Stat.zig"); -pub const Stats = stat.Stats; -pub const StatsBig = stat.StatsBig; -pub const StatsSmall = stat.StatsSmall; - -const statfs = @import("./StatFS.zig"); -pub const StatFSSmall = statfs.StatFSSmall; -pub const StatFSBig = statfs.StatFSBig; -pub const StatFS = statfs.StatFS; diff --git a/src/bun.js/node/util/parse_args.zig b/src/bun.js/node/util/parse_args.zig index 816ad73ad2..99eee3ca17 100644 --- a/src/bun.js/node/util/parse_args.zig +++ b/src/bun.js/node/util/parse_args.zig @@ -155,7 +155,7 @@ fn getDefaultArgs(globalThis: *JSGlobalObject) !ArgsSlice { // Work out where to slice process.argv for user supplied arguments // Check options for scenarios where user CLI args follow executable - const argv: JSValue = JSC.Node.Process.getArgv(globalThis); + const argv: JSValue = bun.api.node.process.getArgv(globalThis); //var found = false; //var iter = argv.arrayIterator(globalThis); @@ -183,19 +183,17 @@ fn checkOptionLikeValue(globalThis: *JSGlobalObject, token: OptionToken) bun.JSE // Only show short example if user used short option. 
var err: JSValue = undefined; if (token.raw.asBunString(globalThis).hasPrefixComptime("--")) { - err = JSC.toTypeError( + err = globalThis.toTypeError( .PARSE_ARGS_INVALID_OPTION_VALUE, "Option '{}' argument is ambiguous.\nDid you forget to specify the option argument for '{}'?\nTo specify an option argument starting with a dash use '{}=-XYZ'.", .{ raw_name, raw_name, raw_name }, - globalThis, ); } else { const token_name = token.name.asBunString(globalThis); - err = JSC.toTypeError( + err = globalThis.toTypeError( .PARSE_ARGS_INVALID_OPTION_VALUE, "Option '{}' argument is ambiguous.\nDid you forget to specify the option argument for '{}'?\nTo specify an option argument starting with a dash use '--{}=-XYZ' or '{}-XYZ'.", .{ raw_name, raw_name, token_name, raw_name }, - globalThis, ); } return globalThis.throwValue(err); @@ -208,7 +206,7 @@ fn checkOptionUsage(globalThis: *JSGlobalObject, options: []const OptionDefiniti const option = options[option_idx]; switch (option.type) { .string => if (token.value == .jsvalue and !token.value.jsvalue.isString()) { - const err = JSC.toTypeError( + const err = globalThis.toTypeError( .PARSE_ARGS_INVALID_OPTION_VALUE, "Option '{s}{s}{s}--{s} ' argument missing", .{ @@ -217,12 +215,11 @@ fn checkOptionUsage(globalThis: *JSGlobalObject, options: []const OptionDefiniti if (!option.short_name.isEmpty()) ", " else "", token.name.asBunString(globalThis), }, - globalThis, ); return globalThis.throwValue(err); }, .boolean => if (token.value != .jsvalue or !token.value.jsvalue.isUndefined()) { - const err = JSC.toTypeError( + const err = globalThis.toTypeError( .PARSE_ARGS_INVALID_OPTION_VALUE, "Option '{s}{s}{s}--{s}' does not take an argument", .{ @@ -231,7 +228,6 @@ fn checkOptionUsage(globalThis: *JSGlobalObject, options: []const OptionDefiniti if (!option.short_name.isEmpty()) ", " else "", token.name.asBunString(globalThis), }, - globalThis, ); return globalThis.throwValue(err); }, @@ -239,16 +235,14 @@ fn checkOptionUsage(globalThis: *JSGlobalObject, options: []const OptionDefiniti } else { const raw_name = OptionToken.RawNameFormatter{ .token = token, .globalThis = globalThis }; - const err = if (allow_positionals) (JSC.toTypeError( + const err = if (allow_positionals) (globalThis.toTypeError( .PARSE_ARGS_UNKNOWN_OPTION, "Unknown option '{}'. To specify a positional argument starting with a '-', place it at the end of the command after '--', as in '-- \"{}\"", .{ raw_name, raw_name }, - globalThis, - )) else (JSC.toTypeError( + )) else (globalThis.toTypeError( .PARSE_ARGS_UNKNOWN_OPTION, "Unknown option '{}'", .{raw_name}, - globalThis, )); return globalThis.throwValue(err); } @@ -316,7 +310,7 @@ fn parseOptionDefinitions(globalThis: *JSGlobalObject, options_obj: JSValue, opt try validators.validateString(globalThis, short_option, "options.{s}.short", .{option.long_name}); var short_option_str = try short_option.toBunString(globalThis); if (short_option_str.length() != 1) { - const err = JSC.toTypeError(.INVALID_ARG_VALUE, "options.{s}.short must be a single character", .{option.long_name}, globalThis); + const err = globalThis.toTypeError(.INVALID_ARG_VALUE, "options.{s}.short must be a single character", .{option.long_name}); return globalThis.throwValue(err); } option.short_name = short_option_str; @@ -578,11 +572,10 @@ const ParseArgsState = struct { }, .positional => |token| { if (!this.allow_positionals) { - const err = JSC.toTypeError( + const err = globalThis.toTypeError( .PARSE_ARGS_UNEXPECTED_POSITIONAL, "Unexpected argument '{s}'. 
This command does not take positional arguments", .{token.value.asBunString(globalThis)}, - globalThis, ); return globalThis.throwValue(err); } @@ -645,7 +638,7 @@ pub fn parseArgs( } comptime { - const parseArgsFn = JSC.toJSHostFunction(parseArgs); + const parseArgsFn = JSC.toJSHostFn(parseArgs); @export(&parseArgsFn, .{ .name = "Bun__NodeUtil__jsParseArgs" }); } diff --git a/src/bun.js/node/win_watcher.zig b/src/bun.js/node/win_watcher.zig index b891ac23af..9edb7399fc 100644 --- a/src/bun.js/node/win_watcher.zig +++ b/src/bun.js/node/win_watcher.zig @@ -12,7 +12,7 @@ const StoredFileDescriptorType = bun.StoredFileDescriptorType; const Output = bun.Output; const Watcher = bun.Watcher; -const FSWatcher = bun.JSC.Node.FSWatcher; +const FSWatcher = bun.JSC.Node.fs.Watcher; const EventType = @import("./path_watcher.zig").PathWatcher.EventType; const Event = FSWatcher.Event; @@ -81,8 +81,8 @@ pub const PathWatcherManager = struct { } }; -const onPathUpdateFn = JSC.Node.FSWatcher.onPathUpdate; -const onUpdateEndFn = JSC.Node.FSWatcher.onUpdateEnd; +const onPathUpdateFn = JSC.Node.fs.Watcher.onPathUpdate; +const onUpdateEndFn = JSC.Node.fs.Watcher.onUpdateEnd; pub const PathWatcher = struct { handle: uv.uv_fs_event_t, @@ -185,7 +185,7 @@ pub const PathWatcher = struct { var outbuf: bun.PathBuffer = undefined; const event_path = switch (bun.sys.readlink(path, &outbuf)) { .err => |err| brk: { - if (err.errno == @intFromEnum(bun.C.E.NOENT)) { + if (err.errno == @intFromEnum(bun.sys.E.NOENT)) { return .{ .err = .{ .errno = err.errno, .syscall = .open, diff --git a/src/bun.js/rare_data.zig b/src/bun.js/rare_data.zig index 55139250f7..88422b4073 100644 --- a/src/bun.js/rare_data.zig +++ b/src/bun.js/rare_data.zig @@ -16,14 +16,14 @@ const Async = bun.Async; const StatWatcherScheduler = @import("./node/node_fs_stat_watcher.zig").StatWatcherScheduler; const IPC = @import("./ipc.zig"); const uws = bun.uws; - +const api = bun.api; boring_ssl_engine: ?*BoringSSL.ENGINE = null, editor_context: EditorContext = EditorContext{}, stderr_store: ?*Blob.Store = null, stdin_store: ?*Blob.Store = null, stdout_store: ?*Blob.Store = null, -postgresql_context: JSC.Postgres.PostgresSQLContext = .{}, +postgresql_context: bun.api.Postgres.PostgresSQLContext = .{}, entropy_cache: ?*EntropyCache = null, @@ -35,7 +35,7 @@ cleanup_hooks: std.ArrayListUnmanaged(CleanupHook) = .{}, file_polls_: ?*Async.FilePoll.Store = null, -global_dns_data: ?*JSC.DNS.GlobalData = null, +global_dns_data: ?*bun.api.DNS.GlobalData = null, spawn_ipc_usockets_context: ?*uws.SocketContext = null, @@ -335,7 +335,7 @@ pub fn stderr(rare: *RareData) *Blob.Store { .ref_count = std.atomic.Value(u32).init(2), .allocator = default_allocator, .data = .{ - .file = Blob.FileStore{ + .file = .{ .pathlike = .{ .fd = fd, }, @@ -366,7 +366,7 @@ pub fn stdout(rare: *RareData) *Blob.Store { .ref_count = std.atomic.Value(u32).init(2), .allocator = default_allocator, .data = .{ - .file = Blob.FileStore{ + .file = .{ .pathlike = .{ .fd = fd, }, @@ -396,7 +396,7 @@ pub fn stdin(rare: *RareData) *Blob.Store { .allocator = default_allocator, .ref_count = std.atomic.Value(u32).init(2), .data = .{ - .file = Blob.FileStore{ + .file = .{ .pathlike = .{ .fd = fd }, .is_atty = if (fd.unwrapValid()) |valid| std.posix.isatty(valid.native()) else false, .mode = mode, @@ -442,9 +442,9 @@ pub fn spawnIPCContext(rare: *RareData, vm: *JSC.VirtualMachine) *uws.SocketCont return ctx; } -pub fn globalDNSResolver(rare: *RareData, vm: *JSC.VirtualMachine) *JSC.DNS.DNSResolver { +pub fn 
globalDNSResolver(rare: *RareData, vm: *JSC.VirtualMachine) *api.DNS.DNSResolver { if (rare.global_dns_data == null) { - rare.global_dns_data = JSC.DNS.GlobalData.init(vm.allocator, vm); + rare.global_dns_data = api.DNS.GlobalData.init(vm.allocator, vm); rare.global_dns_data.?.resolver.ref(); // live forever } diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig index db0ffd1945..fabf586c5a 100644 --- a/src/bun.js/test/expect.zig +++ b/src/bun.js/test/expect.zig @@ -4711,7 +4711,7 @@ pub const Expect = struct { // Even though they point to the same native functions for all matchers, // multiple instances are created because each instance will hold the matcher_fn as a property - const wrapper_fn = Bun__JSWrappingFunction__create(globalThis, matcher_name, JSC.toJSHostFunction(Expect.applyCustomMatcher), matcher_fn, true); + const wrapper_fn = Bun__JSWrappingFunction__create(globalThis, matcher_name, JSC.toJSHostFn(Expect.applyCustomMatcher), matcher_fn, true); expect_proto.put(globalThis, matcher_name, wrapper_fn); expect_constructor.put(globalThis, matcher_name, wrapper_fn); @@ -5721,7 +5721,7 @@ extern fn JSMockFunction__getCalls(JSValue) JSValue; /// If there were no calls, it returns an empty JSArray* extern fn JSMockFunction__getReturns(JSValue) JSValue; -extern fn Bun__JSWrappingFunction__create(globalThis: *JSGlobalObject, symbolName: *const bun.String, functionPointer: JSC.JSHostFunctionPtr, wrappedFn: JSValue, strong: bool) JSValue; +extern fn Bun__JSWrappingFunction__create(globalThis: *JSGlobalObject, symbolName: *const bun.String, functionPointer: *const JSC.JSHostFn, wrappedFn: JSValue, strong: bool) JSValue; extern fn Bun__JSWrappingFunction__getWrappedFunction(this: JSValue, globalThis: *JSGlobalObject) JSValue; extern fn ExpectMatcherUtils__getSingleton(globalThis: *JSGlobalObject) JSValue; diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig index 2c228efbb1..23a38d3690 100644 --- a/src/bun.js/test/jest.zig +++ b/src/bun.js/test/jest.zig @@ -28,9 +28,7 @@ const strings = bun.strings; const string = bun.string; const default_allocator = bun.default_allocator; const FeatureFlags = bun.FeatureFlags; -const ArrayBuffer = @import("../base.zig").ArrayBuffer; -const Properties = @import("../base.zig").Properties; -const getAllocator = @import("../base.zig").getAllocator; +const ArrayBuffer = JSC.ArrayBuffer; const RegularExpression = bun.RegularExpression; const ZigString = JSC.ZigString; @@ -82,7 +80,7 @@ pub const TestRunner = struct { // from `setDefaultTimeout() or jest.setTimeout()` default_timeout_override: u32 = std.math.maxInt(u32), - event_loop_timer: JSC.API.Bun.Timer.EventLoopTimer = .{ + event_loop_timer: bun.api.Timer.EventLoopTimer = .{ .next = .{}, .tag = .TestRunner, }, @@ -279,7 +277,7 @@ pub const TestRunner = struct { pub const Jest = struct { pub var runner: ?*TestRunner = null; - fn globalHook(comptime name: string) JSC.JSHostZigFunction { + fn globalHook(comptime name: string) JSC.JSHostFnZig { return struct { pub fn appendGlobalFunctionCallback(globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue { const the_runner = runner orelse { @@ -323,7 +321,7 @@ pub const Jest = struct { const module = JSValue.createEmptyObject(globalObject, 14); - const test_fn = JSC.NewFunction(globalObject, ZigString.static("test"), 2, ThisTestScope.call, false); + const test_fn = JSC.host_fn.NewFunction(globalObject, ZigString.static("test"), 2, ThisTestScope.call, false); module.put( globalObject, ZigString.static("test"), @@ -335,14 
+333,14 @@ pub const Jest = struct { test_fn.put( globalObject, name, - JSC.NewFunction(globalObject, name, 2, @field(ThisTestScope, method_name), false), + JSC.host_fn.NewFunction(globalObject, name, 2, @field(ThisTestScope, method_name), false), ); } test_fn.put( globalObject, ZigString.static("if"), - JSC.NewFunction(globalObject, ZigString.static("if"), 2, ThisTestScope.callIf, false), + JSC.host_fn.NewFunction(globalObject, ZigString.static("if"), 2, ThisTestScope.callIf, false), ); module.put( @@ -350,7 +348,7 @@ pub const Jest = struct { ZigString.static("it"), test_fn, ); - const describe = JSC.NewFunction(globalObject, ZigString.static("describe"), 2, ThisDescribeScope.call, false); + const describe = JSC.host_fn.NewFunction(globalObject, ZigString.static("describe"), 2, ThisDescribeScope.call, false); inline for (.{ "only", "skip", @@ -363,13 +361,13 @@ pub const Jest = struct { describe.put( globalObject, name, - JSC.NewFunction(globalObject, name, 2, @field(ThisDescribeScope, method_name), false), + JSC.host_fn.NewFunction(globalObject, name, 2, @field(ThisDescribeScope, method_name), false), ); } describe.put( globalObject, ZigString.static("if"), - JSC.NewFunction(globalObject, ZigString.static("if"), 2, ThisDescribeScope.callIf, false), + JSC.host_fn.NewFunction(globalObject, ZigString.static("if"), 2, ThisDescribeScope.callIf, false), ); module.put( @@ -380,9 +378,9 @@ pub const Jest = struct { inline for (.{ "beforeAll", "beforeEach", "afterAll", "afterEach" }) |name| { const function = if (outside_of_test) - JSC.NewFunction(globalObject, null, 1, globalHook(name), false) + JSC.host_fn.NewFunction(globalObject, null, 1, globalHook(name), false) else - JSC.NewFunction( + JSC.host_fn.NewFunction( globalObject, ZigString.static(name), 1, @@ -396,7 +394,7 @@ pub const Jest = struct { module.put( globalObject, ZigString.static("setDefaultTimeout"), - JSC.NewFunction(globalObject, ZigString.static("setDefaultTimeout"), 1, jsSetDefaultTimeout, false), + JSC.host_fn.NewFunction(globalObject, ZigString.static("setDefaultTimeout"), 1, jsSetDefaultTimeout, false), ); module.put( @@ -411,20 +409,20 @@ pub const Jest = struct { } fn createMockObjects(globalObject: *JSGlobalObject, module: JSValue) void { - const setSystemTime = JSC.NewFunction(globalObject, ZigString.static("setSystemTime"), 0, JSMock__jsSetSystemTime, false); + const setSystemTime = JSC.host_fn.NewFunction(globalObject, ZigString.static("setSystemTime"), 0, JSMock__jsSetSystemTime, false); module.put( globalObject, ZigString.static("setSystemTime"), setSystemTime, ); - const useFakeTimers = JSC.NewFunction(globalObject, ZigString.static("useFakeTimers"), 0, JSMock__jsUseFakeTimers, false); - const useRealTimers = JSC.NewFunction(globalObject, ZigString.static("useRealTimers"), 0, JSMock__jsUseRealTimers, false); + const useFakeTimers = JSC.host_fn.NewFunction(globalObject, ZigString.static("useFakeTimers"), 0, JSMock__jsUseFakeTimers, false); + const useRealTimers = JSC.host_fn.NewFunction(globalObject, ZigString.static("useRealTimers"), 0, JSMock__jsUseRealTimers, false); - const mockFn = JSC.NewFunction(globalObject, ZigString.static("fn"), 1, JSMock__jsMockFn, false); - const spyOn = JSC.NewFunction(globalObject, ZigString.static("spyOn"), 2, JSMock__jsSpyOn, false); - const restoreAllMocks = JSC.NewFunction(globalObject, ZigString.static("restoreAllMocks"), 2, JSMock__jsRestoreAllMocks, false); - const clearAllMocks = JSC.NewFunction(globalObject, ZigString.static("clearAllMocks"), 2, JSMock__jsClearAllMocks, 
false); - const mockModuleFn = JSC.NewFunction(globalObject, ZigString.static("module"), 2, JSMock__jsModuleMock, false); + const mockFn = JSC.host_fn.NewFunction(globalObject, ZigString.static("fn"), 1, JSMock__jsMockFn, false); + const spyOn = JSC.host_fn.NewFunction(globalObject, ZigString.static("spyOn"), 2, JSMock__jsSpyOn, false); + const restoreAllMocks = JSC.host_fn.NewFunction(globalObject, ZigString.static("restoreAllMocks"), 2, JSMock__jsRestoreAllMocks, false); + const clearAllMocks = JSC.host_fn.NewFunction(globalObject, ZigString.static("clearAllMocks"), 2, JSMock__jsClearAllMocks, false); + const mockModuleFn = JSC.host_fn.NewFunction(globalObject, ZigString.static("module"), 2, JSMock__jsModuleMock, false); module.put(globalObject, ZigString.static("mock"), mockFn); mockFn.put(globalObject, ZigString.static("module"), mockModuleFn); mockFn.put(globalObject, ZigString.static("restore"), restoreAllMocks); @@ -449,8 +447,8 @@ pub const Jest = struct { ZigString.static("useRealTimers"), useRealTimers, ); - jest.put(globalObject, ZigString.static("now"), JSC.NewFunction(globalObject, ZigString.static("now"), 0, JSMock__jsNow, false)); - jest.put(globalObject, ZigString.static("setTimeout"), JSC.NewFunction(globalObject, ZigString.static("setTimeout"), 1, jsSetDefaultTimeout, false)); + jest.put(globalObject, ZigString.static("now"), JSC.host_fn.NewFunction(globalObject, ZigString.static("now"), 0, JSMock__jsNow, false)); + jest.put(globalObject, ZigString.static("setTimeout"), JSC.host_fn.NewFunction(globalObject, ZigString.static("setTimeout"), 1, jsSetDefaultTimeout, false)); module.put(globalObject, ZigString.static("jest"), jest); module.put(globalObject, ZigString.static("spyOn"), spyOn); @@ -557,11 +555,11 @@ pub const TestScope = struct { actual: u32 = 0, }; - pub fn deinit(this: *TestScope, globalThis: *JSGlobalObject) void { + pub fn deinit(this: *TestScope, _: *JSGlobalObject) void { if (this.label.len > 0) { const label = this.label; this.label = ""; - getAllocator(globalThis).free(label); + bun.default_allocator.free(label); } } @@ -611,7 +609,7 @@ pub const TestScope = struct { globalThis.bunVM().autoGarbageCollect(); return JSValue.jsUndefined(); } - const jsOnReject = JSC.toJSHostFunction(onReject); + const jsOnReject = JSC.toJSHostFn(onReject); pub fn onResolve(globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue { debug("onResolve", .{}); @@ -621,7 +619,7 @@ pub const TestScope = struct { globalThis.bunVM().autoGarbageCollect(); return JSValue.jsUndefined(); } - const jsOnResolve = JSC.toJSHostFunction(onResolve); + const jsOnResolve = JSC.toJSHostFn(onResolve); pub fn onDone( globalThis: *JSGlobalObject, @@ -631,7 +629,7 @@ pub const TestScope = struct { const args = callframe.arguments_old(1); defer globalThis.bunVM().autoGarbageCollect(); - if (JSC.getFunctionData(function)) |data| { + if (JSC.host_fn.getFunctionData(function)) |data| { var task = bun.cast(*TestRunnerTask, data); const expect_count = expect.active_test_expectation_counter.actual; const current_test = task.testScope(); @@ -640,7 +638,7 @@ pub const TestScope = struct { else .{ .pass = expect_count }; - JSC.setFunctionData(function, null); + JSC.host_fn.setFunctionData(function, null); if (args.len > 0) { const err = args.ptr[0]; if (err.isEmptyOrUndefinedOrNull()) { @@ -711,7 +709,7 @@ pub const TestScope = struct { } if (this.func_has_callback) { - const callback_func = JSC.NewFunctionWithData( + const callback_func = JSC.host_fn.NewFunctionWithData( vm.global, 
ZigString.static("done"), 0, @@ -880,7 +878,7 @@ pub const DescribeScope = struct { pub threadlocal var active: ?*DescribeScope = null; - const CallbackFn = JSC.JSHostZigFunction; + const CallbackFn = JSC.JSHostFnZig; fn createCallback(comptime hook: LifecycleHook) CallbackFn { return struct { @@ -896,7 +894,7 @@ pub const DescribeScope = struct { } cb.protect(); - @field(DescribeScope.active.?, @tagName(hook) ++ "s").append(getAllocator(globalThis), cb) catch unreachable; + @field(DescribeScope.active.?, @tagName(hook) ++ "s").append(bun.default_allocator, cb) catch unreachable; return JSValue.jsBoolean(true); } }.run; @@ -910,9 +908,9 @@ pub const DescribeScope = struct { const args = callframe.arguments_old(1); defer ctx.bunVM().autoGarbageCollect(); - if (JSC.getFunctionData(function)) |data| { + if (JSC.host_fn.getFunctionData(function)) |data| { var scope = bun.cast(*DescribeScope, data); - JSC.setFunctionData(function, null); + JSC.host_fn.setFunctionData(function, null); if (args.len > 0) { const err = args.ptr[0]; if (!err.isEmptyOrUndefinedOrNull()) { @@ -934,7 +932,7 @@ pub const DescribeScope = struct { var hooks = &@field(this, @tagName(hook) ++ "s"); defer { if (comptime hook == .beforeAll or hook == .afterAll) { - hooks.clearAndFree(getAllocator(globalObject)); + hooks.clearAndFree(bun.default_allocator); } } @@ -954,7 +952,7 @@ pub const DescribeScope = struct { 0 => callJSFunctionForTestRunner(vm, globalObject, cb, &.{}), else => brk: { this.done = false; - const done_func = JSC.NewFunctionWithData( + const done_func = JSC.host_fn.NewFunctionWithData( globalObject, ZigString.static("done"), 0, @@ -991,7 +989,7 @@ pub const DescribeScope = struct { var hooks = &@field(Jest.runner.?.global_callbacks, @tagName(hook)); defer { if (comptime hook == .beforeAll or hook == .afterAll) { - hooks.clearAndFree(getAllocator(globalThis)); + hooks.clearAndFree(bun.default_allocator); } } @@ -1131,7 +1129,7 @@ pub const DescribeScope = struct { globalObject.clearTerminationException(); const file = this.file_id; - const allocator = getAllocator(globalObject); + const allocator = bun.default_allocator; const tests: []TestScope = this.tests.items; const end = @as(TestRunner.Test.ID, @truncate(tests.len)); this.pending_tests = std.DynamicBitSetUnmanaged.initFull(allocator, end) catch unreachable; @@ -1213,7 +1211,7 @@ pub const DescribeScope = struct { } pub fn deinit(this: *DescribeScope, globalThis: *JSGlobalObject) void { - const allocator = getAllocator(globalThis); + const allocator = bun.default_allocator; if (this.label.len > 0) { const label = this.label; @@ -1231,7 +1229,7 @@ pub const DescribeScope = struct { const ScopeStack = ObjectPool(std.ArrayListUnmanaged(*DescribeScope), null, true, 16); }; -pub fn wrapTestFunction(comptime name: []const u8, comptime func: JSC.JSHostZigFunction) DescribeScope.CallbackFn { +pub fn wrapTestFunction(comptime name: []const u8, comptime func: JSC.JSHostFnZig) DescribeScope.CallbackFn { return struct { pub fn wrapped(globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSValue { if (Jest.runner == null) { @@ -1765,7 +1763,7 @@ inline fn createScope( } } - const allocator = getAllocator(globalThis); + const allocator = bun.default_allocator; const parent = DescribeScope.active.?; const label = brk: { if (description == .zero) { @@ -1925,7 +1923,7 @@ inline fn createIfScope( }; switch (@intFromBool(value)) { - inline else => |index| return JSC.NewFunction(globalThis, name, 2, truthy_falsey[index], false), + inline else => |index| return 
JSC.host_fn.NewFunction(globalThis, name, 2, truthy_falsey[index], false), } } @@ -1938,7 +1936,7 @@ fn consumeArg( arg: *const JSValue, fallback: []const u8, ) !void { - const allocator = getAllocator(globalThis); + const allocator = bun.default_allocator; if (should_write) { const owned_slice = try arg.toSliceOrNull(globalThis); defer owned_slice.deinit(); @@ -1952,7 +1950,7 @@ fn consumeArg( // Generate test label by positionally injecting parameters with printf formatting fn formatLabel(globalThis: *JSGlobalObject, label: string, function_args: []JSValue, test_idx: usize) !string { - const allocator = getAllocator(globalThis); + const allocator = bun.default_allocator; var idx: usize = 0; var args_idx: usize = 0; var list = std.ArrayListUnmanaged(u8).initCapacity(allocator, label.len) catch bun.outOfMemory(); @@ -2064,10 +2062,10 @@ fn eachBind(globalThis: *JSGlobalObject, callframe: *CallFrame) bun.JSError!JSVa const parent = DescribeScope.active.?; - if (JSC.getFunctionData(callee)) |data| { - const allocator = getAllocator(globalThis); + if (JSC.host_fn.getFunctionData(callee)) |data| { + const allocator = bun.default_allocator; const each_data = bun.cast(*EachData, data); - JSC.setFunctionData(callee, null); + JSC.host_fn.setFunctionData(callee, null); const array = each_data.*.strong.get() orelse return .undefined; defer { each_data.*.strong.deinit(); @@ -2206,7 +2204,7 @@ inline fn createEach( return globalThis.throwPretty("{s} expects an array", .{signature}); } - const allocator = getAllocator(globalThis); + const allocator = bun.default_allocator; const name = ZigString.static(property); const strong = JSC.Strong.create(array, globalThis); const each_data = allocator.create(EachData) catch unreachable; @@ -2215,7 +2213,7 @@ inline fn createEach( .is_test = is_test, }; - return JSC.NewFunctionWithData(globalThis, name, 3, eachBind, true, each_data); + return JSC.host_fn.NewFunctionWithData(globalThis, name, 3, eachBind, true, each_data); } fn callJSFunctionForTestRunner(vm: *JSC.VirtualMachine, globalObject: *JSGlobalObject, function: JSValue, args: []const JSValue) JSValue { diff --git a/src/bun.js/test/pretty_format.zig b/src/bun.js/test/pretty_format.zig index 3aca743f1c..7ff69f5403 100644 --- a/src/bun.js/test/pretty_format.zig +++ b/src/bun.js/test/pretty_format.zig @@ -1275,7 +1275,7 @@ pub const JestPrettyFormat = struct { .Object, enable_ansi_colors, ); - } else if (value.as(JSC.API.Bun.Timer.TimeoutObject)) |timer| { + } else if (value.as(bun.api.Timer.TimeoutObject)) |timer| { this.addForNewLine("Timeout(# ) ".len + bun.fmt.fastDigitCount(@as(u64, @intCast(@max(timer.internals.id, 0))))); if (timer.internals.flags.kind == .setInterval) { this.addForNewLine("repeats ".len + bun.fmt.fastDigitCount(@as(u64, @intCast(@max(timer.internals.id, 0))))); @@ -1289,17 +1289,17 @@ pub const JestPrettyFormat = struct { } return; - } else if (value.as(JSC.API.Bun.Timer.ImmediateObject)) |immediate| { + } else if (value.as(bun.api.Timer.ImmediateObject)) |immediate| { this.addForNewLine("Immediate(# ) ".len + bun.fmt.fastDigitCount(@as(u64, @intCast(@max(immediate.internals.id, 0))))); writer.print(comptime Output.prettyFmt("Immediate (#{d})", enable_ansi_colors), .{ immediate.internals.id, }); return; - } else if (value.as(JSC.BuildMessage)) |build_log| { + } else if (value.as(bun.api.BuildMessage)) |build_log| { build_log.msg.writeFormat(writer_, enable_ansi_colors) catch {}; return; - } else if (value.as(JSC.ResolveMessage)) |resolve_log| { + } else if 
(value.as(bun.api.ResolveMessage)) |resolve_log| {
             resolve_log.msg.writeFormat(writer_, enable_ansi_colors) catch {};
             return;
         } else if (printAsymmetricMatcher(this, Format, &writer, writer_, name_buf, value, enable_ansi_colors)) {
diff --git a/src/bun.js/virtual_machine_exports.zig b/src/bun.js/virtual_machine_exports.zig
new file mode 100644
index 0000000000..14c0da9e4f
--- /dev/null
+++ b/src/bun.js/virtual_machine_exports.zig
@@ -0,0 +1,237 @@
+comptime {
+    if (bun.Environment.isWindows) {
+        @export(&Bun__ZigGlobalObject__uvLoop, .{ .name = "Bun__ZigGlobalObject__uvLoop" });
+    }
+}
+
+pub export fn Bun__VirtualMachine__isShuttingDown(this: *const VirtualMachine) callconv(.C) bool {
+    return this.isShuttingDown();
+}
+
+pub export fn Bun__getVM() *JSC.VirtualMachine {
+    return JSC.VirtualMachine.get();
+}
+
+pub export fn Bun__drainMicrotasks() void {
+    JSC.VirtualMachine.get().eventLoop().tick();
+}
+
+export fn Bun__readOriginTimer(vm: *JSC.VirtualMachine) u64 {
+    return vm.origin_timer.read();
+}
+
+export fn Bun__readOriginTimerStart(vm: *JSC.VirtualMachine) f64 {
+    // timespec to milliseconds
+    return @as(f64, @floatCast((@as(f64, @floatFromInt(vm.origin_timestamp)) + JSC.VirtualMachine.origin_relative_epoch) / 1_000_000.0));
+}
+
+pub export fn Bun__GlobalObject__hasIPC(global: *JSGlobalObject) bool {
+    return global.bunVM().ipc != null;
+}
+
+export fn Bun__VirtualMachine__exitDuringUncaughtException(this: *JSC.VirtualMachine) void {
+    this.exit_on_uncaught_exception = true;
+}
+
+comptime {
+    const Bun__Process__send = JSC.toJSHostFn(Bun__Process__send_);
+    @export(&Bun__Process__send, .{ .name = "Bun__Process__send" });
+}
+pub fn Bun__Process__send_(globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSValue {
+    JSC.markBinding(@src());
+    var message, var handle, var options_, var callback = callFrame.argumentsAsArray(4);
+
+    if (handle.isFunction()) {
+        callback = handle;
+        handle = .undefined;
+        options_ = .undefined;
+    } else if (options_.isFunction()) {
+        callback = options_;
+        options_ = .undefined;
+    } else if (!options_.isUndefined()) {
+        try globalObject.validateObject("options", options_, .{});
+    }
+
+    const S = struct {
+        fn impl(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue {
+            const arguments_ = callframe.arguments_old(1).slice();
+            const ex = arguments_[0];
+            VirtualMachine.Process__emitErrorEvent(globalThis, ex);
+            return .undefined;
+        }
+    };
+
+    const vm = globalObject.bunVM();
+    const ipc_instance = vm.getIPCInstance() orelse {
+        const ex = globalObject.ERR(.IPC_CHANNEL_CLOSED, "Channel closed.", .{}).toJS();
+        if (callback.isFunction()) {
+            callback.callNextTick(globalObject, .{ex});
+        } else {
+            const fnvalue = JSC.JSFunction.create(globalObject, "", S.impl, 1, .{});
+            fnvalue.callNextTick(globalObject, .{ex});
+        }
+        return .false;
+    };
+
+    if (message.isUndefined()) {
+        return globalObject.throwMissingArgumentsValue(&.{"message"});
+    }
+    if (!message.isString() and !message.isObject() and !message.isNumber() and !message.isBoolean() and !message.isNull()) {
+        return globalObject.throwInvalidArgumentTypeValue("message", "string, object, number, or boolean", message);
+    }
+
+    const good = ipc_instance.data.serializeAndSend(globalObject, message);
+
+    if (good) {
+        if (callback.isFunction()) {
+            callback.callNextTick(globalObject, .{.null});
+        }
+    } else {
+        const ex = globalObject.createTypeErrorInstance("process.send() failed", .{});
+        ex.put(globalObject, JSC.ZigString.static("syscall"), 
bun.String.static("write").toJS(globalObject)); + if (callback.isFunction()) { + callback.callNextTick(globalObject, .{ex}); + } else { + const fnvalue = JSC.JSFunction.create(globalObject, "", S.impl, 1, .{}); + fnvalue.callNextTick(globalObject, .{ex}); + } + } + + return .true; +} + +pub export fn Bun__isBunMain(globalObject: *JSGlobalObject, str: *const bun.String) bool { + return str.eqlUTF8(globalObject.bunVM().main); +} + +/// When IPC environment variables are passed, the socket is not immediately opened, +/// but rather we wait for process.on('message') or process.send() to be called, THEN +/// we open the socket. This is to avoid missing messages at the start of the program. +pub export fn Bun__ensureProcessIPCInitialized(globalObject: *JSGlobalObject) void { + // getIPC() will initialize a "waiting" ipc instance so this is enough. + // it will do nothing if IPC is not enabled. + _ = globalObject.bunVM().getIPCInstance(); +} + +/// This function is called on the main thread +/// The bunVM() call will assert this +pub export fn Bun__queueTask(global: *JSGlobalObject, task: *JSC.CppTask) void { + JSC.markBinding(@src()); + + global.bunVM().eventLoop().enqueueTask(JSC.Task.init(task)); +} + +pub export fn Bun__queueTaskWithTimeout(global: *JSGlobalObject, task: *JSC.CppTask, milliseconds: i32) void { + JSC.markBinding(@src()); + + global.bunVM().eventLoop().enqueueTaskWithTimeout(JSC.Task.init(task), milliseconds); +} + +pub export fn Bun__reportUnhandledError(globalObject: *JSGlobalObject, value: JSValue) callconv(.C) JSValue { + JSC.markBinding(@src()); + // This JSGlobalObject might not be the main script execution context + // See the crash in https://github.com/oven-sh/bun/issues/9778 + const jsc_vm = JSC.VirtualMachine.get(); + _ = jsc_vm.uncaughtException(globalObject, value, false); + return .undefined; +} + +/// This function is called on another thread +/// The main difference: we need to allocate the task & wakeup the thread +/// We can avoid that if we run it from the main thread. 
+pub export fn Bun__queueTaskConcurrently(global: *JSGlobalObject, task: *JSC.CppTask) void { + JSC.markBinding(@src()); + + global.bunVMConcurrently().eventLoop().enqueueTaskConcurrent( + JSC.ConcurrentTask.create(JSC.Task.init(task)), + ); +} + +pub export fn Bun__handleRejectedPromise(global: *JSGlobalObject, promise: *JSC.JSPromise) void { + JSC.markBinding(@src()); + + const result = promise.result(global.vm()); + var jsc_vm = global.bunVM(); + + // this seems to happen in some cases when GC is running + if (result == .zero) + return; + + _ = jsc_vm.unhandledRejection(global, result, promise.asValue(global)); + jsc_vm.autoGarbageCollect(); +} + +pub export fn Bun__onDidAppendPlugin(jsc_vm: *VirtualMachine, globalObject: *JSGlobalObject) void { + if (jsc_vm.plugin_runner != null) { + return; + } + + jsc_vm.plugin_runner = PluginRunner{ + .global_object = globalObject, + .allocator = jsc_vm.allocator, + }; + jsc_vm.transpiler.linker.plugin_runner = &jsc_vm.plugin_runner.?; +} + +pub fn Bun__ZigGlobalObject__uvLoop(jsc_vm: *VirtualMachine) callconv(.C) *bun.windows.libuv.Loop { + return jsc_vm.uvLoop(); +} + +export fn Bun__setTLSRejectUnauthorizedValue(value: i32) void { + VirtualMachine.get().default_tls_reject_unauthorized = value != 0; +} + +export fn Bun__getTLSRejectUnauthorizedValue() i32 { + return if (JSC.VirtualMachine.get().getTLSRejectUnauthorized()) 1 else 0; +} + +export fn Bun__setVerboseFetchValue(value: i32) void { + VirtualMachine.get().default_verbose_fetch = if (value == 1) .headers else if (value == 2) .curl else .none; +} + +export fn Bun__getVerboseFetchValue() i32 { + return switch (JSC.VirtualMachine.get().getVerboseFetch()) { + .none => 0, + .headers => 1, + .curl => 2, + }; +} + +export fn Bun__addSourceProviderSourceMap(vm: *VirtualMachine, opaque_source_provider: *anyopaque, specifier: *bun.String) void { + var sfb = std.heap.stackFallback(4096, bun.default_allocator); + const slice = specifier.toUTF8(sfb.get()); + defer slice.deinit(); + vm.source_mappings.putZigSourceProvider(opaque_source_provider, slice.slice()); +} + +export fn Bun__removeSourceProviderSourceMap(vm: *VirtualMachine, opaque_source_provider: *anyopaque, specifier: *bun.String) void { + var sfb = std.heap.stackFallback(4096, bun.default_allocator); + const slice = specifier.toUTF8(sfb.get()); + defer slice.deinit(); + vm.source_mappings.removeZigSourceProvider(opaque_source_provider, slice.slice()); +} + +pub fn Bun__setSyntheticAllocationLimitForTesting(globalObject: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const args = callframe.arguments_old(1).slice(); + if (args.len < 1) { + return globalObject.throwNotEnoughArguments("setSyntheticAllocationLimitForTesting", 1, args.len); + } + + if (!args[0].isNumber()) { + return globalObject.throwInvalidArguments("setSyntheticAllocationLimitForTesting expects a number", .{}); + } + + const limit: usize = @intCast(@max(args[0].coerceToInt64(globalObject), 1024 * 1024)); + const prev = VirtualMachine.synthetic_allocation_limit; + VirtualMachine.synthetic_allocation_limit = limit; + VirtualMachine.string_allocation_limit = limit; + return JSValue.jsNumber(prev); +} + +const std = @import("std"); +const bun = @import("bun"); +const JSC = bun.jsc; +const VirtualMachine = JSC.VirtualMachine; +const JSGlobalObject = JSC.JSGlobalObject; +const JSValue = JSC.JSValue; +const PluginRunner = bun.transpiler.PluginRunner; diff --git a/src/bun.js/webcore.zig b/src/bun.js/webcore.zig index cee06364c7..e128398d26 100644 --- 
a/src/bun.js/webcore.zig +++ b/src/bun.js/webcore.zig @@ -1,22 +1,106 @@ -pub usingnamespace @import("./webcore/response.zig"); -pub usingnamespace @import("./webcore/encoding.zig"); -pub usingnamespace @import("./webcore/streams.zig"); -pub usingnamespace @import("./webcore/blob.zig"); -pub const S3Stat = @import("./webcore/S3Stat.zig").S3Stat; -pub const S3Client = @import("./webcore/S3Client.zig").S3Client; -pub usingnamespace @import("./webcore/request.zig"); -pub usingnamespace @import("./webcore/body.zig"); -pub const CookieMap = @import("./webcore/CookieMap.zig").CookieMap; -pub const ObjectURLRegistry = @import("./webcore/ObjectURLRegistry.zig"); -const JSC = bun.JSC; -const std = @import("std"); -const bun = @import("bun"); -const string = bun.string; -pub const AbortSignal = @import("./bindings/bindings.zig").AbortSignal; -pub const JSValue = @import("./bindings/bindings.zig").JSValue; -const Environment = bun.Environment; -const UUID7 = @import("./uuid.zig").UUID7; -const c = bun.c; +//! Web APIs implemented in Zig live here + +comptime { + if (bun.Environment.export_cpp_apis) { + _ = &@import("webcore/prompt.zig"); + } + _ = &@import("webcore/TextEncoder.zig"); +} + +// TODO: make this JSGlobalObject local for better security +pub const ByteListPool = bun.ObjectPool(bun.ByteList, null, true, 8); + +pub const Crypto = @import("webcore/Crypto.zig"); +pub const AbortSignal = @import("bindings/AbortSignal.zig").AbortSignal; +pub const WebWorker = @import("web_worker.zig").WebWorker; +pub const AutoFlusher = @import("webcore/AutoFlusher.zig"); +pub const EncodingLabel = @import("webcore/EncodingLabel.zig").EncodingLabel; +pub const Fetch = @import("webcore/fetch.zig"); +pub const Response = @import("webcore/Response.zig"); +pub const TextDecoder = @import("webcore/TextDecoder.zig"); +pub const TextEncoder = @import("webcore/TextEncoder.zig"); +pub const TextEncoderStreamEncoder = @import("webcore/TextEncoderStreamEncoder.zig"); +pub const encoding = @import("webcore/encoding.zig"); +pub const ReadableStream = @import("webcore/ReadableStream.zig"); +pub const Blob = @import("webcore/Blob.zig"); +pub const S3Stat = @import("webcore/S3Stat.zig").S3Stat; +pub const S3Client = @import("webcore/S3Client.zig").S3Client; +pub const Request = @import("webcore/Request.zig"); +pub const Body = @import("webcore/Body.zig"); +pub const CookieMap = @import("webcore/CookieMap.zig").CookieMap; +pub const ObjectURLRegistry = @import("webcore/ObjectURLRegistry.zig"); +pub const Sink = @import("webcore/Sink.zig"); +pub const FileSink = @import("webcore/FileSink.zig"); +pub const FetchHeaders = @import("bindings/FetchHeaders.zig").FetchHeaders; +pub const ByteBlobLoader = @import("webcore/ByteBlobLoader.zig"); +pub const ByteStream = @import("webcore/ByteStream.zig"); +pub const FileReader = @import("webcore/FileReader.zig"); + +pub const streams = @import("webcore/streams.zig"); +pub const NetworkSink = streams.NetworkSink; +pub const HTTPResponseSink = streams.HTTPResponseSink; +pub const HTTPSResponseSink = streams.HTTPSResponseSink; +pub const HTTPServerWritable = streams.HTTPServerWritable; + +const WebSocketHTTPClient = @import("../http/websocket_http_client.zig").WebSocketHTTPClient; +const WebSocketHTTPSClient = @import("../http/websocket_http_client.zig").WebSocketHTTPSClient; +const WebSocketClient = @import("../http/websocket_http_client.zig").WebSocketClient; +const WebSocketClientTLS = @import("../http/websocket_http_client.zig").WebSocketClientTLS; +comptime { + 
WebSocketClient.exportAll(); + WebSocketClientTLS.exportAll(); + WebSocketHTTPClient.exportAll(); + WebSocketHTTPSClient.exportAll(); +} + +pub const PathOrFileDescriptor = union(enum) { + path: JSC.ZigString.Slice, + fd: bun.FileDescriptor, + + pub fn deinit(this: *const PathOrFileDescriptor) void { + if (this.* == .path) this.path.deinit(); + } +}; + +pub const Pipe = struct { + ctx: ?*anyopaque = null, + onPipe: ?Function = null, + + pub const Function = *const fn ( + ctx: *anyopaque, + stream: streams.Result, + allocator: std.mem.Allocator, + ) void; + + pub fn Wrap(comptime Type: type, comptime function: anytype) type { + return struct { + pub fn pipe(self: *anyopaque, stream: streams.Result, allocator: std.mem.Allocator) void { + function( + @as(*Type, @ptrCast(@alignCast(self))), + stream, + allocator, + ); + } + + pub fn init(self: *Type) Pipe { + return Pipe{ + .ctx = self, + .onPipe = pipe, + }; + } + }; + } +}; + +pub const DrainResult = union(enum) { + owned: struct { + list: std.ArrayList(u8), + size_hint: usize, + }, + estimated_size: usize, + empty: void, + aborted: void, +}; pub const Lifetime = enum { clone, @@ -26,539 +110,6 @@ pub const Lifetime = enum { temporary, }; -/// https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#dom-alert -fn alert(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const arguments = callframe.arguments_old(1).slice(); - var output = bun.Output.writer(); - const has_message = arguments.len != 0; - - // 2. If the method was invoked with no arguments, then let message be the empty string; otherwise, let message be the method's first argument. - if (has_message) { - var state = std.heap.stackFallback(2048, bun.default_allocator); - const allocator = state.get(); - const message = try arguments[0].toSlice(globalObject, allocator); - defer message.deinit(); - - if (message.len > 0) { - // 3. Set message to the result of normalizing newlines given message. - // * We skip step 3 because they are already done in most terminals by default. - - // 4. Set message to the result of optionally truncating message. - // * We just don't do this because it's not necessary. - - // 5. Show message to the user, treating U+000A LF as a line break. - output.writeAll(message.slice()) catch { - // 1. If we cannot show simple dialogs for this, then return. - return .undefined; - }; - } - } - - output.writeAll(if (has_message) " [Enter] " else "Alert [Enter] ") catch { - // 1. If we cannot show simple dialogs for this, then return. - return .undefined; - }; - - // 6. Invoke WebDriver BiDi user prompt opened with this, "alert", and message. - // * Not pertinent to use their complex system in a server context. - bun.Output.flush(); - - // 7. Optionally, pause while waiting for the user to acknowledge the message. - var stdin = std.io.getStdIn(); - var reader = stdin.reader(); - while (true) { - const byte = reader.readByte() catch break; - if (byte == '\n') break; - } - - // 8. Invoke WebDriver BiDi user prompt closed with this and true. - // * Again, not necessary in a server context. - - return .undefined; -} - -fn confirm(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const arguments = callframe.arguments_old(1).slice(); - var output = bun.Output.writer(); - const has_message = arguments.len != 0; - - if (has_message) { - var state = std.heap.stackFallback(1024, bun.default_allocator); - const allocator = state.get(); - // 2. 
Set message to the result of normalizing newlines given message. - // * Not pertinent to a server runtime so we will just let the terminal handle this. - - // 3. Set message to the result of optionally truncating message. - // * Not necessary so we won't do it. - const message = try arguments[0].toSlice(globalObject, allocator); - defer message.deinit(); - - output.writeAll(message.slice()) catch { - // 1. If we cannot show simple dialogs for this, then return false. - return .false; - }; - } - - // 4. Show message to the user, treating U+000A LF as a line break, - // and ask the user to respond with a positive or negative - // response. - output.writeAll(if (has_message) " [y/N] " else "Confirm [y/N] ") catch { - // 1. If we cannot show simple dialogs for this, then return false. - return .false; - }; - - // 5. Invoke WebDriver BiDi user prompt opened with this, "confirm", and message. - // * Not relevant in a server context. - bun.Output.flush(); - - // 6. Pause until the user responds either positively or negatively. - var stdin = std.io.getStdIn(); - const unbuffered_reader = stdin.reader(); - var buffered = std.io.bufferedReader(unbuffered_reader); - var reader = buffered.reader(); - - const first_byte = reader.readByte() catch { - return .false; - }; - - // 7. Invoke WebDriver BiDi user prompt closed with this, and true if - // the user responded positively or false otherwise. - // * Not relevant in a server context. - - switch (first_byte) { - '\n' => return .false, - '\r' => { - const next_byte = reader.readByte() catch { - // They may have said yes, but the stdin is invalid. - return .false; - }; - if (next_byte == '\n') { - return .false; - } - }, - 'y', 'Y' => { - const next_byte = reader.readByte() catch { - // They may have said yes, but the stdin is invalid. - - return .false; - }; - - if (next_byte == '\n') { - // 8. If the user responded positively, return true; - // otherwise, the user responded negatively: return false. - return .true; - } else if (next_byte == '\r') { - //Check Windows style - const second_byte = reader.readByte() catch { - return .false; - }; - if (second_byte == '\n') { - return .true; - } - } - }, - else => {}, - } - - while (reader.readByte()) |b| { - if (b == '\n' or b == '\r') break; - } else |_| {} - - // 8. If the user responded positively, return true; otherwise, the user - // responded negatively: return false. - return .false; -} - -pub const Prompt = struct { - /// Adapted from `std.io.Reader.readUntilDelimiterArrayList` to only append - /// and assume capacity. - pub fn readUntilDelimiterArrayListAppendAssumeCapacity( - reader: anytype, - array_list: *std.ArrayList(u8), - delimiter: u8, - max_size: usize, - ) !void { - while (true) { - if (array_list.items.len == max_size) { - return error.StreamTooLong; - } - - const byte: u8 = try reader.readByte(); - - if (byte == delimiter) { - return; - } - - array_list.appendAssumeCapacity(byte); - } - } - - /// Adapted from `std.io.Reader.readUntilDelimiterArrayList` to always append - /// and not resize. 
- fn readUntilDelimiterArrayListInfinity( - reader: anytype, - array_list: *std.ArrayList(u8), - delimiter: u8, - ) !void { - while (true) { - const byte: u8 = try reader.readByte(); - - if (byte == delimiter) { - return; - } - - try array_list.append(byte); - } - } - - /// https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#dom-prompt - pub fn call( - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const arguments = callframe.arguments_old(3).slice(); - var state = std.heap.stackFallback(2048, bun.default_allocator); - const allocator = state.get(); - var output = bun.Output.writer(); - const has_message = arguments.len != 0; - const has_default = arguments.len >= 2; - // 4. Set default to the result of optionally truncating default. - // * We don't really need to do this. - const default = if (has_default) arguments[1] else .null; - - if (has_message) { - // 2. Set message to the result of normalizing newlines given message. - // * Not pertinent to a server runtime so we will just let the terminal handle this. - - // 3. Set message to the result of optionally truncating message. - // * Not necessary so we won't do it. - const message = try arguments[0].toSlice(globalObject, allocator); - defer message.deinit(); - - output.writeAll(message.slice()) catch { - // 1. If we cannot show simple dialogs for this, then return null. - return .null; - }; - } - - // 4. Set default to the result of optionally truncating default. - - // 5. Show message to the user, treating U+000A LF as a line break, - // and ask the user to either respond with a string value or - // abort. The response must be defaulted to the value given by - // default. - output.writeAll(if (has_message) " " else "Prompt ") catch { - // 1. If we cannot show simple dialogs for this, then return false. - return .false; - }; - - if (has_default) { - const default_string = try arguments[1].toSlice(globalObject, allocator); - defer default_string.deinit(); - - output.print("[{s}] ", .{default_string.slice()}) catch { - // 1. If we cannot show simple dialogs for this, then return false. - return .false; - }; - } - - // 6. Invoke WebDriver BiDi user prompt opened with this, "prompt" and message. - // * Not relevant in a server context. - bun.Output.flush(); - - // unset `ENABLE_VIRTUAL_TERMINAL_INPUT` on windows. This prevents backspace from - // deleting the entire line - const original_mode: if (Environment.isWindows) ?bun.windows.DWORD else void = if (comptime Environment.isWindows) - bun.windows.updateStdioModeFlags(.std_in, .{ .unset = c.ENABLE_VIRTUAL_TERMINAL_INPUT }) catch null; - - defer if (comptime Environment.isWindows) { - if (original_mode) |mode| { - _ = bun.c.SetConsoleMode(bun.FD.stdin().native(), mode); - } - }; - - // 7. Pause while waiting for the user's response. - const reader = bun.Output.buffered_stdin.reader(); - var second_byte: ?u8 = null; - const first_byte = reader.readByte() catch { - // 8. Let result be null if the user aborts, or otherwise the string - // that the user responded with. - return .null; - }; - - if (first_byte == '\n') { - // 8. Let result be null if the user aborts, or otherwise the string - // that the user responded with. - return default; - } else if (first_byte == '\r') { - const second = reader.readByte() catch return .null; - second_byte = second; - if (second == '\n') return default; - } - - var input = std.ArrayList(u8).initCapacity(allocator, 2048) catch { - // 8. 
Let result be null if the user aborts, or otherwise the string - // that the user responded with. - return .null; - }; - defer input.deinit(); - - input.appendAssumeCapacity(first_byte); - if (second_byte) |second| input.appendAssumeCapacity(second); - - // All of this code basically just first tries to load the input into a - // buffer of size 2048. If that is too small, then increase the buffer - // size to 4096. If that is too small, then just dynamically allocate - // the rest. - readUntilDelimiterArrayListAppendAssumeCapacity(reader, &input, '\n', 2048) catch |e| { - if (e != error.StreamTooLong) { - // 8. Let result be null if the user aborts, or otherwise the string - // that the user responded with. - return .null; - } - - input.ensureTotalCapacity(4096) catch { - // 8. Let result be null if the user aborts, or otherwise the string - // that the user responded with. - return .null; - }; - - readUntilDelimiterArrayListAppendAssumeCapacity(reader, &input, '\n', 4096) catch |e2| { - if (e2 != error.StreamTooLong) { - // 8. Let result be null if the user aborts, or otherwise the string - // that the user responded with. - return .null; - } - - readUntilDelimiterArrayListInfinity(reader, &input, '\n') catch { - // 8. Let result be null if the user aborts, or otherwise the string - // that the user responded with. - return .null; - }; - }; - }; - - if (input.items.len > 0 and input.items[input.items.len - 1] == '\r') { - input.items.len -= 1; - } - - if (comptime Environment.allow_assert) { - bun.assert(input.items.len > 0); - bun.assert(input.items[input.items.len - 1] != '\r'); - } - - // 8. Let result be null if the user aborts, or otherwise the string - // that the user responded with. - var result = JSC.ZigString.init(input.items); - result.markUTF8(); - - // 9. Invoke WebDriver BiDi user prompt closed with this, false if - // result is null or true otherwise, and result. - // * Too complex for server context. - - // 9. Return result. 
- return result.toJS(globalObject); - } -}; - -pub const Crypto = struct { - pub const js = JSC.Codegen.JSCrypto; - pub const toJS = js.toJS; - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - garbage: i32 = 0, - - const BoringSSL = bun.BoringSSL.c; - - fn throwInvalidParameter(globalThis: *JSC.JSGlobalObject) bun.JSError { - return globalThis.ERR(.CRYPTO_SCRYPT_INVALID_PARAMETER, "Invalid scrypt parameters", .{}).throw(); - } - - fn throwInvalidParams(globalThis: *JSC.JSGlobalObject, comptime error_type: @Type(.enum_literal), comptime message: [:0]const u8, fmt: anytype) bun.JSError { - if (error_type != .RangeError) @compileError("Error type not added!"); - BoringSSL.ERR_clear_error(); - return globalThis.ERR(.CRYPTO_INVALID_SCRYPT_PARAMS, message, fmt).throw(); - } - - pub fn timingSafeEqual(_: *@This(), global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - return JSC.Node.Crypto.timingSafeEqual(global, callframe); - } - - pub fn timingSafeEqualWithoutTypeChecks( - _: *@This(), - globalThis: *JSC.JSGlobalObject, - array_a: *JSC.JSUint8Array, - array_b: *JSC.JSUint8Array, - ) JSC.JSValue { - const a = array_a.slice(); - const b = array_b.slice(); - - const len = a.len; - if (b.len != len) { - return globalThis.ERR(.CRYPTO_TIMING_SAFE_EQUAL_LENGTH, "Input buffers must have the same byte length", .{}).throw(); - } - - return JSC.jsBoolean(bun.BoringSSL.c.CRYPTO_memcmp(a.ptr, b.ptr, len) == 0); - } - - pub fn getRandomValues( - _: *@This(), - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const arguments = callframe.arguments_old(1).slice(); - if (arguments.len == 0) { - return globalThis.throwInvalidArguments("Expected typed array but got nothing", .{}); - } - - var array_buffer = arguments[0].asArrayBuffer(globalThis) orelse { - return globalThis.throwInvalidArguments("Expected typed array but got {s}", .{@tagName(arguments[0].jsType())}); - }; - const slice = array_buffer.byteSlice(); - - randomData(globalThis, slice.ptr, slice.len); - - return arguments[0]; - } - - pub fn getRandomValuesWithoutTypeChecks( - _: *@This(), - globalThis: *JSC.JSGlobalObject, - array: *JSC.JSUint8Array, - ) JSC.JSValue { - const slice = array.slice(); - randomData(globalThis, slice.ptr, slice.len); - return @as(JSC.JSValue, @enumFromInt(@as(i64, @bitCast(@intFromPtr(array))))); - } - - fn randomData( - globalThis: *JSC.JSGlobalObject, - ptr: [*]u8, - len: usize, - ) void { - const slice = ptr[0..len]; - - switch (slice.len) { - 0 => {}, - // 512 bytes or less we reuse from the same cache as UUID generation. 
- 1...JSC.RareData.EntropyCache.size / 8 => { - bun.copy(u8, slice, globalThis.bunVM().rareData().entropySlice(slice.len)); - }, - else => { - bun.csprng(slice); - }, - } - } - - pub fn randomUUID( - _: *@This(), - globalThis: *JSC.JSGlobalObject, - _: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - var str, var bytes = bun.String.createUninitialized(.latin1, 36); - - const uuid = globalThis.bunVM().rareData().nextUUID(); - - uuid.print(bytes[0..36]); - return str.transferToJS(globalThis); - } - - comptime { - const Bun__randomUUIDv7 = JSC.toJSHostFunction(Bun__randomUUIDv7_); - @export(&Bun__randomUUIDv7, .{ .name = "Bun__randomUUIDv7" }); - } - pub fn Bun__randomUUIDv7_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const arguments = callframe.argumentsUndef(2).slice(); - - var encoding_value: JSC.JSValue = .undefined; - - const encoding: JSC.Node.Encoding = brk: { - if (arguments.len > 0) { - if (arguments[0] != .undefined) { - if (arguments[0].isString()) { - encoding_value = arguments[0]; - break :brk try JSC.Node.Encoding.fromJS(encoding_value, globalThis) orelse { - return globalThis.ERR(.UNKNOWN_ENCODING, "Encoding must be one of base64, base64url, hex, or buffer", .{}).throw(); - }; - } - } - } - - break :brk JSC.Node.Encoding.hex; - }; - - const timestamp: u64 = brk: { - const timestamp_value: JSC.JSValue = if (encoding_value != .undefined and arguments.len > 1) - arguments[1] - else if (arguments.len == 1 and encoding_value == .undefined) - arguments[0] - else - .undefined; - - if (timestamp_value != .undefined) { - if (timestamp_value.isDate()) { - const date = timestamp_value.getUnixTimestamp(); - break :brk @intFromFloat(@max(0, date)); - } - break :brk @intCast(try globalThis.validateIntegerRange(timestamp_value, i64, 0, .{ .min = 0, .field_name = "timestamp" })); - } - - break :brk @intCast(@max(0, std.time.milliTimestamp())); - }; - - const entropy = globalThis.bunVM().rareData().entropySlice(8); - - const uuid = UUID7.init(timestamp, &entropy[0..8].*); - - if (encoding == .hex) { - var str, var bytes = bun.String.createUninitialized(.latin1, 36); - uuid.print(bytes[0..36]); - return str.transferToJS(globalThis); - } - - return encoding.encodeWithMaxSize(globalThis, 32, &uuid.bytes); - } - - pub fn randomUUIDWithoutTypeChecks( - _: *Crypto, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - const str, var bytes = bun.String.createUninitialized(.latin1, 36); - defer str.deref(); - - // randomUUID must have been called already many times before this kicks - // in so we can skip the rare_data pointer check. 
- const uuid = globalThis.bunVM().rare_data.?.nextUUID(); - - uuid.print(bytes[0..36]); - return str.toJS(globalThis); - } - - pub fn constructor(globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!*Crypto { - return JSC.Error.ILLEGAL_CONSTRUCTOR.throw(globalThis, "Crypto is not constructable", .{}); - } - - pub export fn CryptoObject__create(globalThis: *JSC.JSGlobalObject) JSC.JSValue { - JSC.markBinding(@src()); - - var ptr = bun.default_allocator.create(Crypto) catch { - return globalThis.throwOutOfMemoryValue(); - }; - - return ptr.toJS(globalThis); - } - - comptime { - _ = CryptoObject__create; - } -}; - -comptime { - const js_alert = JSC.toJSHostFunction(alert); - @export(&js_alert, .{ .name = "WebCore__alert" }); - const js_prompt = JSC.toJSHostFunction(Prompt.call); - @export(&js_prompt, .{ .name = "WebCore__prompt" }); - const js_confirm = JSC.toJSHostFunction(confirm); - @export(&js_confirm, .{ .name = "WebCore__confirm" }); -} +const std = @import("std"); +const bun = @import("bun"); +const JSC = bun.JSC; diff --git a/src/bun.js/webcore/ArrayBufferSink.zig b/src/bun.js/webcore/ArrayBufferSink.zig new file mode 100644 index 0000000000..28b0b4303f --- /dev/null +++ b/src/bun.js/webcore/ArrayBufferSink.zig @@ -0,0 +1,193 @@ +const ArrayBufferSink = @This(); +pub const JSSink = webcore.Sink.JSSink(@This(), "ArrayBufferSink"); + +bytes: bun.ByteList, +allocator: std.mem.Allocator, +done: bool = false, +signal: Signal = .{}, +next: ?Sink = null, +streaming: bool = false, +as_uint8array: bool = false, + +pub fn connect(this: *ArrayBufferSink, signal: Signal) void { + bun.assert(this.reader == null); + this.signal = signal; +} + +pub fn start(this: *ArrayBufferSink, stream_start: streams.Start) JSC.Maybe(void) { + this.bytes.len = 0; + var list = this.bytes.listManaged(this.allocator); + list.clearRetainingCapacity(); + + switch (stream_start) { + .ArrayBufferSink => |config| { + if (config.chunk_size > 0) { + list.ensureTotalCapacityPrecise(config.chunk_size) catch return .{ .err = Syscall.Error.oom }; + this.bytes.update(list); + } + + this.as_uint8array = config.as_uint8array; + this.streaming = config.stream; + }, + else => {}, + } + + this.done = false; + + this.signal.start(); + return .{ .result = {} }; +} + +pub fn flush(_: *ArrayBufferSink) JSC.Maybe(void) { + return .{ .result = {} }; +} + +pub fn flushFromJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject, wait: bool) JSC.Maybe(JSValue) { + if (this.streaming) { + const value: JSValue = switch (this.as_uint8array) { + true => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .Uint8Array), + false => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .ArrayBuffer), + }; + this.bytes.len = 0; + if (wait) {} + return .{ .result = value }; + } + + return .{ .result = JSValue.jsNumber(0) }; +} + +pub fn finalize(this: *ArrayBufferSink) void { + if (this.bytes.len > 0) { + this.bytes.listManaged(this.allocator).deinit(); + this.bytes = bun.ByteList.init(""); + this.done = true; + } + + this.allocator.destroy(this); +} + +pub fn init(allocator: std.mem.Allocator, next: ?Sink) !*ArrayBufferSink { + const this = try allocator.create(ArrayBufferSink); + this.* = ArrayBufferSink{ + .bytes = bun.ByteList.init(&.{}), + .allocator = allocator, + .next = next, + }; + return this; +} + +pub fn construct( + this: *ArrayBufferSink, + allocator: std.mem.Allocator, +) void { + this.* = ArrayBufferSink{ + .bytes = bun.ByteList{}, + .allocator = allocator, + .next = null, + }; +} + +pub fn write(this: *@This(), data: 
streams.Result) streams.Result.Writable { + if (this.next) |*next| { + return next.writeBytes(data); + } + + const len = this.bytes.write(this.allocator, data.slice()) catch { + return .{ .err = Syscall.Error.oom }; + }; + this.signal.ready(null, null); + return .{ .owned = len }; +} +pub const writeBytes = write; +pub fn writeLatin1(this: *@This(), data: streams.Result) streams.Result.Writable { + if (this.next) |*next| { + return next.writeLatin1(data); + } + const len = this.bytes.writeLatin1(this.allocator, data.slice()) catch { + return .{ .err = Syscall.Error.oom }; + }; + this.signal.ready(null, null); + return .{ .owned = len }; +} +pub fn writeUTF16(this: *@This(), data: streams.Result) streams.Result.Writable { + if (this.next) |*next| { + return next.writeUTF16(data); + } + const len = this.bytes.writeUTF16(this.allocator, @as([*]const u16, @ptrCast(@alignCast(data.slice().ptr)))[0..std.mem.bytesAsSlice(u16, data.slice()).len]) catch { + return .{ .err = Syscall.Error.oom }; + }; + this.signal.ready(null, null); + return .{ .owned = len }; +} + +pub fn end(this: *ArrayBufferSink, err: ?Syscall.Error) JSC.Maybe(void) { + if (this.next) |*next| { + return next.end(err); + } + this.signal.close(err); + return .{ .result = {} }; +} +pub fn destroy(this: *ArrayBufferSink) void { + this.bytes.deinitWithAllocator(this.allocator); + this.allocator.destroy(this); +} +pub fn toJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject, as_uint8array: bool) JSValue { + if (this.streaming) { + const value: JSValue = switch (as_uint8array) { + true => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .Uint8Array), + false => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .ArrayBuffer), + }; + this.bytes.len = 0; + return value; + } + + var list = this.bytes.listManaged(this.allocator); + this.bytes = bun.ByteList.init(""); + return ArrayBuffer.fromBytes( + try list.toOwnedSlice(), + if (as_uint8array) + .Uint8Array + else + .ArrayBuffer, + ).toJS(globalThis, null); +} + +pub fn endFromJS(this: *ArrayBufferSink, _: *JSGlobalObject) JSC.Maybe(ArrayBuffer) { + if (this.done) { + return .{ .result = ArrayBuffer.fromBytes(&[_]u8{}, .ArrayBuffer) }; + } + + bun.assert(this.next == null); + var list = this.bytes.listManaged(this.allocator); + this.bytes = bun.ByteList.init(""); + this.done = true; + this.signal.close(null); + return .{ .result = ArrayBuffer.fromBytes( + list.toOwnedSlice() catch bun.outOfMemory(), + if (this.as_uint8array) + .Uint8Array + else + .ArrayBuffer, + ) }; +} + +pub fn sink(this: *ArrayBufferSink) Sink { + return Sink.init(this); +} + +pub fn memoryCost(this: *const ArrayBufferSink) usize { + // Since this is a JSSink, the NewJSSink function does @sizeOf(JSSink) which includes @sizeOf(ArrayBufferSink). + return this.bytes.cap; +} + +const std = @import("std"); +const bun = @import("bun"); +const JSC = bun.JSC; +const Syscall = bun.sys; +const Sink = webcore.Sink; +const webcore = bun.webcore; +const streams = webcore.streams; +const Signal = webcore.streams.Signal; +const JSGlobalObject = JSC.JSGlobalObject; +const JSValue = JSC.JSValue; +const ArrayBuffer = JSC.ArrayBuffer; diff --git a/src/bun.js/webcore/Blob.zig b/src/bun.js/webcore/Blob.zig new file mode 100644 index 0000000000..b83f8d48d2 --- /dev/null +++ b/src/bun.js/webcore/Blob.zig @@ -0,0 +1,4774 @@ +//! The JS `Blob` class can be backed by different forms (in Blob.Store), which +//! represent different sources of Blob. For example, `Bun.file()` returns Blob +//! 
objects that reference the filesystem (Blob.Store.File). This is how +//! operations like writing `Store.File` to another `Store.File` knows to use a +//! basic file copy instead of a naive read write loop. +const Blob = @This(); +const debug = Output.scoped(.Blob, false); + +pub const Store = @import("blob/Store.zig"); +pub const read_file = @import("blob/read_file.zig"); +pub const write_file = @import("blob/write_file.zig"); +pub const copy_file = @import("blob/copy_file.zig"); + +pub const new = bun.TrivialNew(@This()); +pub const js = JSC.Codegen.JSBlob; +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; + +reported_estimated_size: usize = 0, + +size: SizeType = 0, +offset: SizeType = 0, +/// When set, the blob will be freed on finalization callbacks +/// If the blob is contained in Response or Request, this must be null +allocator: ?std.mem.Allocator = null, +store: ?*Store = null, +content_type: string = "", +content_type_allocated: bool = false, +content_type_was_set: bool = false, + +/// JavaScriptCore strings are either latin1 or UTF-16 +/// When UTF-16, they're nearly always due to non-ascii characters +is_all_ascii: ?bool = null, + +/// Was it created via file constructor? +is_jsdom_file: bool = false, + +globalThis: *JSGlobalObject = undefined, + +last_modified: f64 = 0.0, +/// Blob name will lazy initialize when getName is called, but +/// we must be able to set the name, and we need to keep the value alive +/// https://github.com/oven-sh/bun/issues/10178 +name: bun.String = .dead, + +/// Max int of double precision +/// 9 petabytes is probably enough for awhile +/// We want to avoid coercing to a BigInt because that's a heap allocation +/// and it's generally just harder to use +pub const SizeType = u52; +pub const max_size = std.math.maxInt(SizeType); + +/// 1: Initial +/// 2: Added byte for whether it's a dom file, length and bytes for `stored_name`, +/// and f64 for `last_modified`. Removed reserved bytes, it's handled by version +/// number. 
+const serialization_version: u8 = 2; + +comptime { + _ = Bun__Blob__getSizeForBindings; +} + +pub const ClosingState = enum(u8) { + running, + closing, +}; + +pub fn getFormDataEncoding(this: *Blob) ?*bun.FormData.AsyncFormData { + var content_type_slice: ZigString.Slice = this.getContentType() orelse return null; + defer content_type_slice.deinit(); + const encoding = bun.FormData.Encoding.get(content_type_slice.slice()) orelse return null; + return bun.FormData.AsyncFormData.init(this.allocator orelse bun.default_allocator, encoding) catch bun.outOfMemory(); +} + +pub fn hasContentTypeFromUser(this: *const Blob) bool { + return this.content_type_was_set or (this.store != null and (this.store.?.data == .file or this.store.?.data == .s3)); +} + +pub fn contentTypeOrMimeType(this: *const Blob) ?[]const u8 { + if (this.content_type.len > 0) { + return this.content_type; + } + if (this.store) |store| { + switch (store.data) { + .file => |file| { + return file.mime_type.value; + }, + .s3 => |s3| { + return s3.mime_type.value; + }, + else => return null, + } + } + return null; +} + +pub fn isBunFile(this: *const Blob) bool { + const store = this.store orelse return false; + + return store.data == .file; +} + +pub fn doReadFromS3(this: *Blob, comptime Function: anytype, global: *JSGlobalObject) JSValue { + debug("doReadFromS3", .{}); + + const WrappedFn = struct { + pub fn wrapped(b: *Blob, g: *JSGlobalObject, by: []u8) JSC.JSValue { + return JSC.toJSHostValue(g, Function(b, g, by, .clone)); + } + }; + return S3BlobDownloadTask.init(global, this, WrappedFn.wrapped); +} +pub fn doReadFile(this: *Blob, comptime Function: anytype, global: *JSGlobalObject) JSValue { + debug("doReadFile", .{}); + + const Handler = NewReadFileHandler(Function); + + var handler = bun.new(Handler, .{ + .context = this.*, + .globalThis = global, + }); + + if (Environment.isWindows) { + var promise = JSPromise.create(global); + const promise_value = promise.asValue(global); + promise_value.ensureStillAlive(); + handler.promise.strong.set(global, promise_value); + + read_file.ReadFileUV.start(handler.globalThis.bunVM().uvLoop(), this.store.?, this.offset, this.size, Handler, handler); + + return promise_value; + } + + const file_read = read_file.ReadFile.create( + bun.default_allocator, + this.store.?, + this.offset, + this.size, + *Handler, + handler, + Handler.run, + ) catch bun.outOfMemory(); + var read_file_task = read_file.ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch bun.outOfMemory(); + + // Create the Promise only after the store has been ref()'d. + // The garbage collector runs on memory allocations + // The JSPromise is the next GC'd memory allocation. + // This shouldn't really fix anything, but it's a little safer. 
+ var promise = JSPromise.create(global); + const promise_value = promise.asValue(global); + promise_value.ensureStillAlive(); + handler.promise.strong.set(global, promise_value); + + read_file_task.schedule(); + + debug("doReadFile: read_file_task scheduled", .{}); + return promise_value; +} + +pub fn NewInternalReadFileHandler(comptime Context: type, comptime Function: anytype) type { + return struct { + pub fn run(handler: *anyopaque, bytes: read_file.ReadFileResultType) void { + Function(bun.cast(Context, handler), bytes); + } + }; +} + +pub fn doReadFileInternal(this: *Blob, comptime Handler: type, ctx: Handler, comptime Function: anytype, global: *JSGlobalObject) void { + if (Environment.isWindows) { + const ReadFileHandler = NewInternalReadFileHandler(Handler, Function); + return read_file.ReadFileUV.start(libuv.Loop.get(), this.store.?, this.offset, this.size, ReadFileHandler, ctx); + } + const file_read = read_file.ReadFile.createWithCtx( + bun.default_allocator, + this.store.?, + ctx, + NewInternalReadFileHandler(Handler, Function).run, + this.offset, + this.size, + ) catch bun.outOfMemory(); + var read_file_task = read_file.ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch bun.outOfMemory(); + read_file_task.schedule(); +} + +const FormDataContext = struct { + allocator: std.mem.Allocator, + joiner: StringJoiner, + boundary: []const u8, + failed: bool = false, + globalThis: *JSC.JSGlobalObject, + + pub fn onEntry(this: *FormDataContext, name: ZigString, entry: JSC.DOMFormData.FormDataEntry) void { + if (this.failed) return; + var globalThis = this.globalThis; + + const allocator = this.allocator; + const joiner = &this.joiner; + const boundary = this.boundary; + + joiner.pushStatic("--"); + joiner.pushStatic(boundary); // note: "static" here means "outlives the joiner" + joiner.pushStatic("\r\n"); + + joiner.pushStatic("Content-Disposition: form-data; name=\""); + const name_slice = name.toSlice(allocator); + joiner.push(name_slice.slice(), name_slice.allocator.get()); + + switch (entry) { + .string => |value| { + joiner.pushStatic("\"\r\n\r\n"); + const value_slice = value.toSlice(allocator); + joiner.push(value_slice.slice(), value_slice.allocator.get()); + }, + .file => |value| { + joiner.pushStatic("\"; filename=\""); + const filename_slice = value.filename.toSlice(allocator); + joiner.push(filename_slice.slice(), filename_slice.allocator.get()); + joiner.pushStatic("\"\r\n"); + + const blob = value.blob; + const content_type = if (blob.content_type.len > 0) blob.content_type else "application/octet-stream"; + joiner.pushStatic("Content-Type: "); + joiner.pushStatic(content_type); + joiner.pushStatic("\r\n\r\n"); + + if (blob.store) |store| { + if (blob.size == Blob.max_size) { + blob.resolveSize(); + } + switch (store.data) { + .s3 => |_| { + // TODO: s3 + // we need to make this async and use download/downloadSlice + }, + .file => |file| { + + // TODO: make this async + lazy + const res = JSC.Node.fs.NodeFS.readFile( + globalThis.bunVM().nodeFS(), + .{ + .encoding = .buffer, + .path = file.pathlike, + .offset = blob.offset, + .max_size = blob.size, + }, + .sync, + ); + + switch (res) { + .err => |err| { + globalThis.throwValue(err.toJSC(globalThis)) catch {}; + this.failed = true; + }, + .result => |result| { + joiner.push(result.slice(), result.buffer.allocator); + }, + } + }, + .bytes => |_| { + joiner.pushStatic(blob.sharedView()); + }, + } + } + }, + } + + joiner.pushStatic("\r\n"); + } +}; + +pub fn getContentType( + this: *Blob, +) 
?ZigString.Slice { + if (this.content_type.len > 0) + return ZigString.Slice.fromUTF8NeverFree(this.content_type); + + return null; +} + +const StructuredCloneWriter = struct { + ctx: *anyopaque, + impl: *const fn (*anyopaque, ptr: [*]const u8, len: u32) callconv(JSC.conv) void, + + pub const WriteError = error{}; + pub fn write(this: StructuredCloneWriter, bytes: []const u8) WriteError!usize { + this.impl(this.ctx, bytes.ptr, @as(u32, @truncate(bytes.len))); + return bytes.len; + } +}; + +fn _onStructuredCloneSerialize( + this: *Blob, + comptime Writer: type, + writer: Writer, +) !void { + try writer.writeInt(u8, serialization_version, .little); + + try writer.writeInt(u64, @intCast(this.offset), .little); + + try writer.writeInt(u32, @truncate(this.content_type.len), .little); + try writer.writeAll(this.content_type); + try writer.writeInt(u8, @intFromBool(this.content_type_was_set), .little); + + const store_tag: Store.SerializeTag = if (this.store) |store| + if (store.data == .file) .file else .bytes + else + .empty; + + try writer.writeInt(u8, @intFromEnum(store_tag), .little); + + this.resolveSize(); + if (this.store) |store| { + try store.serialize(Writer, writer); + } + + try writer.writeInt(u8, @intFromBool(this.is_jsdom_file), .little); + try writeFloat(f64, this.last_modified, Writer, writer); +} + +pub fn onStructuredCloneSerialize( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + ctx: *anyopaque, + writeBytes: *const fn (*anyopaque, ptr: [*]const u8, len: u32) callconv(JSC.conv) void, +) void { + _ = globalThis; + + const Writer = std.io.Writer(StructuredCloneWriter, StructuredCloneWriter.WriteError, StructuredCloneWriter.write); + const writer = Writer{ + .context = .{ + .ctx = ctx, + .impl = writeBytes, + }, + }; + + try _onStructuredCloneSerialize(this, Writer, writer); +} + +pub fn onStructuredCloneTransfer( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + ctx: *anyopaque, + write: *const fn (*anyopaque, ptr: [*]const u8, len: usize) callconv(.C) void, +) void { + _ = write; + _ = ctx; + _ = this; + _ = globalThis; +} + +fn writeFloat( + comptime FloatType: type, + value: FloatType, + comptime Writer: type, + writer: Writer, +) !void { + const bytes: [@sizeOf(FloatType)]u8 = @bitCast(value); + try writer.writeAll(&bytes); +} + +fn readFloat( + comptime FloatType: type, + comptime Reader: type, + reader: Reader, +) !FloatType { + const bytes = try reader.readBoundedBytes(@sizeOf(FloatType)); + return @bitCast(bytes.slice()[0..@sizeOf(FloatType)].*); +} + +fn readSlice( + reader: anytype, + len: usize, + allocator: std.mem.Allocator, +) ![]u8 { + var slice = try allocator.alloc(u8, len); + slice = slice[0..try reader.read(slice)]; + if (slice.len != len) return error.TooSmall; + return slice; +} + +fn _onStructuredCloneDeserialize( + globalThis: *JSC.JSGlobalObject, + comptime Reader: type, + reader: Reader, +) !JSValue { + const allocator = bun.default_allocator; + + const version = try reader.readInt(u8, .little); + + const offset = try reader.readInt(u64, .little); + + const content_type_len = try reader.readInt(u32, .little); + + const content_type = try readSlice(reader, content_type_len, allocator); + + const content_type_was_set: bool = try reader.readInt(u8, .little) != 0; + + const store_tag = try reader.readEnum(Store.SerializeTag, .little); + + const blob: *Blob = switch (store_tag) { + .bytes => bytes: { + const bytes_len = try reader.readInt(u32, .little); + const bytes = try readSlice(reader, bytes_len, allocator); + + const blob = Blob.init(bytes, 
allocator, globalThis); + + versions: { + if (version == 1) break :versions; + + const name_len = try reader.readInt(u32, .little); + const name = try readSlice(reader, name_len, allocator); + + if (blob.store) |store| switch (store.data) { + .bytes => |*bytes_store| bytes_store.stored_name = bun.PathString.init(name), + else => {}, + }; + + if (version == 2) break :versions; + } + + break :bytes Blob.new(blob); + }, + .file => file: { + const pathlike_tag = try reader.readEnum(JSC.Node.PathOrFileDescriptor.SerializeTag, .little); + + switch (pathlike_tag) { + .fd => { + const fd = try reader.readStruct(bun.FD); + + var path_or_fd = JSC.Node.PathOrFileDescriptor{ + .fd = fd, + }; + const blob = Blob.new(Blob.findOrCreateFileFromPath( + &path_or_fd, + globalThis, + true, + )); + + break :file blob; + }, + .path => { + const path_len = try reader.readInt(u32, .little); + + const path = try readSlice(reader, path_len, default_allocator); + var dest = JSC.Node.PathOrFileDescriptor{ + .path = .{ + .string = bun.PathString.init(path), + }, + }; + const blob = Blob.new(Blob.findOrCreateFileFromPath( + &dest, + globalThis, + true, + )); + + break :file blob; + }, + } + + return .zero; + }, + .empty => Blob.new(Blob.initEmpty(globalThis)), + }; + + versions: { + if (version == 1) break :versions; + + blob.is_jsdom_file = try reader.readInt(u8, .little) != 0; + blob.last_modified = try readFloat(f64, Reader, reader); + + if (version == 2) break :versions; + } + + blob.allocator = allocator; + blob.offset = @as(u52, @intCast(offset)); + if (content_type.len > 0) { + blob.content_type = content_type; + blob.content_type_allocated = true; + blob.content_type_was_set = content_type_was_set; + } + + return blob.toJS(globalThis); +} + +pub fn onStructuredCloneDeserialize(globalThis: *JSC.JSGlobalObject, ptr: [*]u8, end: [*]u8) bun.JSError!JSValue { + const total_length: usize = @intFromPtr(end) - @intFromPtr(ptr); + var buffer_stream = std.io.fixedBufferStream(ptr[0..total_length]); + const reader = buffer_stream.reader(); + + return _onStructuredCloneDeserialize(globalThis, @TypeOf(reader), reader) catch |err| switch (err) { + error.EndOfStream, error.TooSmall, error.InvalidValue => { + return globalThis.throw("Blob.onStructuredCloneDeserialize failed", .{}); + }, + error.OutOfMemory => { + return globalThis.throwOutOfMemory(); + }, + }; +} + +const URLSearchParamsConverter = struct { + allocator: std.mem.Allocator, + buf: []u8 = "", + globalThis: *JSC.JSGlobalObject, + pub fn convert(this: *URLSearchParamsConverter, str: ZigString) void { + var out = str.toSlice(this.allocator).cloneIfNeeded(this.allocator) catch bun.outOfMemory(); + this.buf = @constCast(out.slice()); + } +}; + +pub fn fromURLSearchParams( + globalThis: *JSC.JSGlobalObject, + allocator: std.mem.Allocator, + search_params: *JSC.URLSearchParams, +) Blob { + var converter = URLSearchParamsConverter{ + .allocator = allocator, + .globalThis = globalThis, + }; + search_params.toString(URLSearchParamsConverter, &converter, URLSearchParamsConverter.convert); + var store = Blob.Store.init(converter.buf, allocator); + store.mime_type = MimeType.all.@"application/x-www-form-urlencoded"; + + var blob = Blob.initWithStore(store, globalThis); + blob.content_type = store.mime_type.value; + blob.content_type_was_set = true; + return blob; +} + +pub fn fromDOMFormData( + globalThis: *JSC.JSGlobalObject, + allocator: std.mem.Allocator, + form_data: *JSC.DOMFormData, +) Blob { + var arena = bun.ArenaAllocator.init(allocator); + defer arena.deinit(); + 
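+    // The multipart body below is joined through a 1 KB stack buffer that falls back to the arena for larger form data.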
var stack_allocator = std.heap.stackFallback(1024, arena.allocator()); + const stack_mem_all = stack_allocator.get(); + + var hex_buf: [70]u8 = undefined; + const boundary = brk: { + var random = globalThis.bunVM().rareData().nextUUID().bytes; + const formatter = std.fmt.fmtSliceHexLower(&random); + break :brk std.fmt.bufPrint(&hex_buf, "-WebkitFormBoundary{any}", .{formatter}) catch unreachable; + }; + + var context = FormDataContext{ + .allocator = allocator, + .joiner = .{ .allocator = stack_mem_all }, + .boundary = boundary, + .globalThis = globalThis, + }; + + form_data.forEach(FormDataContext, &context, FormDataContext.onEntry); + if (context.failed) { + return Blob.initEmpty(globalThis); + } + + context.joiner.pushStatic("--"); + context.joiner.pushStatic(boundary); + context.joiner.pushStatic("--\r\n"); + + const store = Blob.Store.init(context.joiner.done(allocator) catch bun.outOfMemory(), allocator); + var blob = Blob.initWithStore(store, globalThis); + blob.content_type = std.fmt.allocPrint(allocator, "multipart/form-data; boundary={s}", .{boundary}) catch bun.outOfMemory(); + blob.content_type_allocated = true; + blob.content_type_was_set = true; + + return blob; +} + +pub fn contentType(this: *const Blob) string { + return this.content_type; +} + +pub fn isDetached(this: *const Blob) bool { + return this.store == null; +} + +export fn Blob__dupeFromJS(value: JSC.JSValue) ?*Blob { + const this = Blob.fromJS(value) orelse return null; + return Blob__dupe(this); +} + +export fn Blob__setAsFile(this: *Blob, path_str: *bun.String) *Blob { + this.is_jsdom_file = true; + + // This is not 100% correct... + if (this.store) |store| { + if (store.data == .bytes) { + if (store.data.bytes.stored_name.len == 0) { + var utf8 = path_str.toUTF8WithoutRef(bun.default_allocator).clone(bun.default_allocator) catch unreachable; + store.data.bytes.stored_name = bun.PathString.init(utf8.slice()); + } + } + } + + return this; +} + +export fn Blob__dupe(ptr: *anyopaque) *Blob { + const this = bun.cast(*Blob, ptr); + const new_ptr = new(this.dupeWithContentType(true)); + new_ptr.allocator = bun.default_allocator; + return new_ptr; +} + +export fn Blob__destroy(this: *Blob) void { + this.finalize(); +} + +export fn Blob__getFileNameString(this: *Blob) callconv(.C) bun.String { + if (this.getFileName()) |filename| { + return bun.String.fromBytes(filename); + } + + return bun.String.empty; +} + +comptime { + _ = Blob__dupeFromJS; + _ = Blob__destroy; + _ = Blob__dupe; + _ = Blob__setAsFile; + _ = Blob__getFileNameString; +} + +pub fn writeFormatForSize(is_jdom_file: bool, size: usize, writer: anytype, comptime enable_ansi_colors: bool) !void { + if (is_jdom_file) { + try writer.writeAll(comptime Output.prettyFmt("File", enable_ansi_colors)); + } else { + try writer.writeAll(comptime Output.prettyFmt("Blob", enable_ansi_colors)); + } + try writer.print( + comptime Output.prettyFmt(" ({any})", enable_ansi_colors), + .{ + bun.fmt.size(size, .{}), + }, + ); +} + +pub fn writeFormat(this: *Blob, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + const Writer = @TypeOf(writer); + + if (this.isDetached()) { + if (this.is_jsdom_file) { + try writer.writeAll(comptime Output.prettyFmt("[File detached]", enable_ansi_colors)); + } else { + try writer.writeAll(comptime Output.prettyFmt("[Blob detached]", enable_ansi_colors)); + } + return; + } + + { + const store = this.store.?; + switch (store.data) { + .s3 => |*s3| { + try S3File.writeFormat(s3, Formatter, 
formatter, writer, enable_ansi_colors, this.content_type, this.offset); + }, + .file => |file| { + try writer.writeAll(comptime Output.prettyFmt("FileRef", enable_ansi_colors)); + switch (file.pathlike) { + .path => |path| { + try writer.print( + comptime Output.prettyFmt(" (\"{s}\")", enable_ansi_colors), + .{ + path.slice(), + }, + ); + }, + .fd => |fd| { + if (Environment.isWindows) { + switch (fd.decodeWindows()) { + .uv => |uv_file| try writer.print( + comptime Output.prettyFmt(" (fd: {d})", enable_ansi_colors), + .{uv_file}, + ), + .windows => |handle| { + if (Environment.isDebug) { + @panic("this shouldn't be reachable."); + } + try writer.print( + comptime Output.prettyFmt(" (fd: 0x{x})", enable_ansi_colors), + .{@intFromPtr(handle)}, + ); + }, + } + } else { + try writer.print( + comptime Output.prettyFmt(" (fd: {d})", enable_ansi_colors), + .{fd.native()}, + ); + } + }, + } + }, + .bytes => { + try writeFormatForSize(this.is_jsdom_file, this.size, writer, enable_ansi_colors); + }, + } + } + + const show_name = (this.is_jsdom_file and this.getNameString() != null) or (!this.name.isEmpty() and this.store != null and this.store.?.data == .bytes); + if (!this.isS3() and (this.content_type.len > 0 or this.offset > 0 or show_name or this.last_modified != 0.0)) { + try writer.writeAll(" {\n"); + { + formatter.indent += 1; + defer formatter.indent -= 1; + + if (show_name) { + try formatter.writeIndent(Writer, writer); + + try writer.print( + comptime Output.prettyFmt("name: \"{}\"", enable_ansi_colors), + .{ + this.getNameString() orelse bun.String.empty, + }, + ); + + if (this.content_type.len > 0 or this.offset > 0 or this.last_modified != 0) { + try formatter.printComma(Writer, writer, enable_ansi_colors); + } + + try writer.writeAll("\n"); + } + + if (this.content_type.len > 0) { + try formatter.writeIndent(Writer, writer); + try writer.print( + comptime Output.prettyFmt("type: \"{s}\"", enable_ansi_colors), + .{ + this.content_type, + }, + ); + + if (this.offset > 0 or this.last_modified != 0) { + try formatter.printComma(Writer, writer, enable_ansi_colors); + } + + try writer.writeAll("\n"); + } + + if (this.offset > 0) { + try formatter.writeIndent(Writer, writer); + + try writer.print( + comptime Output.prettyFmt("offset: {d}\n", enable_ansi_colors), + .{ + this.offset, + }, + ); + + if (this.last_modified != 0) { + try formatter.printComma(Writer, writer, enable_ansi_colors); + } + + try writer.writeAll("\n"); + } + + if (this.last_modified != 0) { + try formatter.writeIndent(Writer, writer); + + try writer.print( + comptime Output.prettyFmt("lastModified: {d}\n", enable_ansi_colors), + .{ + this.last_modified, + }, + ); + } + } + + try formatter.writeIndent(Writer, writer); + try writer.writeAll("}"); + } +} + +const Retry = enum { @"continue", fail, no }; + +// TODO: move this to bun.sys? +// we choose not to inline this so that the path buffer is not on the stack unless necessary. 
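+// Returns .@"continue" when the missing parent directory was created and the caller should retry the
+// original operation, .fail when the error has been recorded on `this`, and .no when this was not a
+// missing-directory case.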
+pub noinline fn mkdirIfNotExists(this: anytype, err: bun.sys.Error, path_string: [:0]const u8, err_path: []const u8) Retry { + if (err.getErrno() == .NOENT and this.mkdirp_if_not_exists) { + if (std.fs.path.dirname(path_string)) |dirname| { + var node_fs: JSC.Node.fs.NodeFS = .{}; + switch (node_fs.mkdirRecursive(.{ + .path = .{ .string = bun.PathString.init(dirname) }, + .recursive = true, + .always_return_none = true, + })) { + .result => { + this.mkdirp_if_not_exists = false; + return .@"continue"; + }, + .err => |err2| { + if (comptime @hasField(@TypeOf(this.*), "errno")) { + this.errno = bun.errnoToZigErr(err2.errno); + } + this.system_error = err.withPath(err_path).toSystemError(); + if (comptime @hasField(@TypeOf(this.*), "opened_fd")) { + this.opened_fd = invalid_fd; + } + return .fail; + }, + } + } + } + return .no; +} + +/// Write an empty string to a file by truncating it. +/// +/// This behavior matches what we do with the fast path. +/// +/// Returns an encoded `*JSPromise` that resolves if the file +/// - doesn't exist and is created +/// - exists and is truncated +fn writeFileWithEmptySourceToDestination( + ctx: *JSC.JSGlobalObject, + destination_blob: *Blob, + options: WriteFileOptions, +) JSC.JSValue { + // SAFETY: null-checked by caller + const destination_store = destination_blob.store.?; + defer destination_blob.detach(); + + switch (destination_store.data) { + .file => |file| { + // TODO: make this async + const node_fs = ctx.bunVM().nodeFS(); + var result = node_fs.truncate(.{ + .path = file.pathlike, + .len = 0, + .flags = bun.O.CREAT, + }, .sync); + + if (result == .err) { + const errno = result.err.getErrno(); + var was_eperm = false; + err: switch (errno) { + // truncate might return EPERM when the parent directory doesn't exist + // #6336 + .PERM => { + was_eperm = true; + result.err.errno = @intCast(@intFromEnum(bun.sys.E.NOENT)); + continue :err .NOENT; + }, + .NOENT => { + if (options.mkdirp_if_not_exists == false) break :err; + // NOTE: if .err is PERM, it ~should~ really is a + // permissions issue + const dirpath: []const u8 = switch (file.pathlike) { + .path => |path| std.fs.path.dirname(path.slice()) orelse break :err, + .fd => { + // NOTE: if this is an fd, it means the file + // exists, so we shouldn't try to mkdir it + // also means PERM is _actually_ a + // permissions issue + if (was_eperm) result.err.errno = @intCast(@intFromEnum(bun.sys.E.PERM)); + break :err; + }, + }; + const mkdir_result = node_fs.mkdirRecursive(.{ + .path = .{ .string = bun.PathString.init(dirpath) }, + // TODO: Do we really want .mode to be 0o777? + .recursive = true, + .always_return_none = true, + }); + if (mkdir_result == .err) { + result.err = mkdir_result.err; + break :err; + } + + // SAFETY: we check if `file.pathlike` is an fd or + // not above, returning if it is. 
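+                            // Create-or-truncate the file via O_CREAT | O_TRUNC, retrying on EINTR; on
+                            // success the fd is closed immediately and the promise resolves with 0 bytes written.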
+ var buf: bun.PathBuffer = undefined; + // TODO: respect `options.mode` + const mode: bun.Mode = JSC.Node.fs.default_permission; + while (true) { + const open_res = bun.sys.open(file.pathlike.path.sliceZ(&buf), bun.O.CREAT | bun.O.TRUNC, mode); + switch (open_res) { + // errors fall through and are handled below + .err => |err| { + if (err.getErrno() == .INTR) continue; + result.err = open_res.err; + break :err; + }, + .result => |fd| { + fd.close(); + return JSC.JSPromise.resolvedPromiseValue(ctx, .jsNumber(0)); + }, + } + } + }, + else => {}, + } + + result.err = result.err.withPathLike(file.pathlike); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, result.toJS(ctx)); + } + }, + .s3 => |*s3| { + + // create empty file + var aws_options = s3.getCredentialsWithOptions(options.extra_options, ctx) catch |err| { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.takeException(err)); + }; + defer aws_options.deinit(); + + const Wrapper = struct { + promise: JSC.JSPromise.Strong, + store: *Store, + global: *JSC.JSGlobalObject, + + pub const new = bun.TrivialNew(@This()); + + pub fn resolve(result: S3.S3UploadResult, opaque_this: *anyopaque) void { + const this: *@This() = @ptrCast(@alignCast(opaque_this)); + switch (result) { + .success => this.promise.resolve(this.global, JSC.jsNumber(0)), + .failure => |err| this.promise.reject(this.global, err.toJS(this.global, this.store.getPath())), + } + this.deinit(); + } + + fn deinit(this: *@This()) void { + this.promise.deinit(); + this.store.deref(); + bun.destroy(this); + } + }; + + const promise = JSC.JSPromise.Strong.init(ctx); + const promise_value = promise.value(); + const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy_url = if (proxy) |p| p.href else null; + destination_store.ref(); + S3.upload( + &aws_options.credentials, + s3.path(), + "", + destination_blob.contentTypeOrMimeType(), + aws_options.acl, + proxy_url, + aws_options.storage_class, + Wrapper.resolve, + Wrapper.new(.{ + .promise = promise, + .store = destination_store, + .global = ctx, + }), + ); + return promise_value; + }, + // Writing to a buffer-backed blob should be a type error, + // making this unreachable. TODO: `{}` -> `unreachable` + .bytes => {}, + } + + return JSC.JSPromise.resolvedPromiseValue(ctx, JSC.JSValue.jsNumber(0)); +} + +pub fn writeFileWithSourceDestination( + ctx: *JSC.JSGlobalObject, + source_blob: *Blob, + destination_blob: *Blob, + options: WriteFileOptions, +) JSC.JSValue { + const destination_store = destination_blob.store orelse Output.panic("Destination blob is detached", .{}); + const destination_type = std.meta.activeTag(destination_store.data); + + // TODO: make sure this invariant isn't being broken elsewhere (outside + // its usage from `Blob.writeFileInternal`), then upgrade this to + // Environment.allow_assert + if (Environment.isDebug) { + bun.assertf(destination_type != .bytes, "Cannot write to a Blob backed by a Buffer or TypedArray. This is a bug in the caller. 
Please report it to the Bun team.", .{}); + } + + const source_store = source_blob.store orelse return writeFileWithEmptySourceToDestination(ctx, destination_blob, options); + const source_type = std.meta.activeTag(source_store.data); + + if (destination_type == .file and source_type == .bytes) { + var write_file_promise = bun.new(WriteFilePromise, .{ + .globalThis = ctx, + }); + + if (comptime Environment.isWindows) { + var promise = JSPromise.create(ctx); + const promise_value = promise.asValue(ctx); + promise_value.ensureStillAlive(); + write_file_promise.promise.strong.set(ctx, promise_value); + _ = write_file.WriteFileWindows.create( + ctx.bunVM().eventLoop(), + destination_blob.*, + source_blob.*, + *WriteFilePromise, + write_file_promise, + &WriteFilePromise.run, + options.mkdirp_if_not_exists orelse true, + ); + return promise_value; + } + + const file_copier = write_file.WriteFile.create( + destination_blob.*, + source_blob.*, + *WriteFilePromise, + write_file_promise, + WriteFilePromise.run, + options.mkdirp_if_not_exists orelse true, + ) catch unreachable; + var task = write_file.WriteFileTask.createOnJSThread(bun.default_allocator, ctx, file_copier) catch bun.outOfMemory(); + // Defer promise creation until we're just about to schedule the task + var promise = JSC.JSPromise.create(ctx); + const promise_value = promise.asValue(ctx); + write_file_promise.promise.strong.set(ctx, promise_value); + promise_value.ensureStillAlive(); + task.schedule(); + return promise_value; + } + // If this is file <> file, we can just copy the file + else if (destination_type == .file and source_type == .file) { + if (comptime Environment.isWindows) { + return Blob.copy_file.CopyFileWindows.init( + destination_store, + source_store, + ctx.bunVM().eventLoop(), + options.mkdirp_if_not_exists orelse true, + destination_blob.size, + ); + } + var file_copier = copy_file.CopyFile.create( + bun.default_allocator, + destination_store, + source_store, + destination_blob.offset, + destination_blob.size, + ctx, + options.mkdirp_if_not_exists orelse true, + ) catch unreachable; + file_copier.schedule(); + return file_copier.promise.value(); + } else if (destination_type == .file and source_type == .s3) { + const s3 = &source_store.data.s3; + if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( + ctx, + source_blob, + @truncate(s3.options.partSize), + ), ctx)) |stream| { + return destination_blob.pipeReadableStreamToBlob(ctx, stream, options.extra_options); + } else { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.createErrorInstance("Failed to stream bytes from s3 bucket", .{})); + } + } else if (destination_type == .bytes and source_type == .bytes) { + // If this is bytes <> bytes, we can just duplicate it + // this is an edgecase + // it will happen if someone did Bun.write(new Blob([123]), new Blob([456])) + // eventually, this could be like Buffer.concat + var clone = source_blob.dupe(); + clone.allocator = bun.default_allocator; + const cloned = Blob.new(clone); + cloned.allocator = bun.default_allocator; + return JSPromise.resolvedPromiseValue(ctx, cloned.toJS(ctx)); + } else if (destination_type == .bytes and (source_type == .file or source_type == .s3)) { + const blob_value = source_blob.getSliceFrom(ctx, 0, 0, "", false); + + return JSPromise.resolvedPromiseValue( + ctx, + blob_value, + ); + } else if (destination_type == .s3) { + const s3 = &destination_store.data.s3; + var aws_options = s3.getCredentialsWithOptions(options.extra_options, 
ctx) catch |err| { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.takeException(err)); + }; + defer aws_options.deinit(); + const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy_url = if (proxy) |p| p.href else null; + switch (source_store.data) { + .bytes => |bytes| { + if (bytes.len > S3.MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) { + if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( + ctx, + source_blob, + @truncate(s3.options.partSize), + ), ctx)) |stream| { + return S3.uploadStream( + (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + s3.path(), + stream, + ctx, + aws_options.options, + aws_options.acl, + aws_options.storage_class, + destination_blob.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); + } else { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.createErrorInstance("Failed to stream bytes to s3 bucket", .{})); + } + } else { + const Wrapper = struct { + store: *Store, + promise: JSC.JSPromise.Strong, + global: *JSC.JSGlobalObject, + + pub const new = bun.TrivialNew(@This()); + + pub fn resolve(result: S3.S3UploadResult, opaque_self: *anyopaque) void { + const this: *@This() = @ptrCast(@alignCast(opaque_self)); + switch (result) { + .success => this.promise.resolve(this.global, JSC.jsNumber(this.store.data.bytes.len)), + .failure => |err| this.promise.reject(this.global, err.toJS(this.global, this.store.getPath())), + } + this.deinit(); + } + + fn deinit(this: *@This()) void { + this.promise.deinit(); + this.store.deref(); + } + }; + source_store.ref(); + const promise = JSC.JSPromise.Strong.init(ctx); + const promise_value = promise.value(); + + S3.upload( + &aws_options.credentials, + s3.path(), + bytes.slice(), + destination_blob.contentTypeOrMimeType(), + aws_options.acl, + proxy_url, + aws_options.storage_class, + Wrapper.resolve, + Wrapper.new(.{ + .store = source_store, + .promise = promise, + .global = ctx, + }), + ); + return promise_value; + } + }, + .file, .s3 => { + // stream + if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( + ctx, + source_blob, + @truncate(s3.options.partSize), + ), ctx)) |stream| { + return S3.uploadStream( + (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + s3.path(), + stream, + ctx, + s3.options, + aws_options.acl, + aws_options.storage_class, + destination_blob.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); + } else { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.createErrorInstance("Failed to stream bytes to s3 bucket", .{})); + } + }, + } + } + + unreachable; +} + +const WriteFileOptions = struct { + mkdirp_if_not_exists: ?bool = null, + extra_options: ?JSValue = null, +}; + +/// ## Errors +/// - If `path_or_blob` is a detached blob +/// ## Panics +/// - If `path_or_blob` is a `Blob` backed by a byte store +pub fn writeFileInternal(globalThis: *JSC.JSGlobalObject, path_or_blob_: *PathOrBlob, data: JSC.JSValue, options: WriteFileOptions) bun.JSError!JSC.JSValue { + if (data.isEmptyOrUndefinedOrNull()) { + return globalThis.throwInvalidArguments("Bun.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}); + } + var path_or_blob = path_or_blob_.*; + if (path_or_blob == .blob) { + const blob_store = path_or_blob.blob.store orelse { + return globalThis.throwInvalidArguments("Blob is detached", .{}); + }; + 
bun.assertWithLocation(blob_store.data != .bytes, @src()); + // TODO only reset last_modified on success paths instead of + // resetting last_modified at the beginning for better performance. + if (blob_store.data == .file) { + // reset last_modified to force getLastModified() to reload after writing. + blob_store.data.file.last_modified = JSC.init_timestamp; + } + } + + const input_store: ?*Store = if (path_or_blob == .blob) path_or_blob.blob.store else null; + if (input_store) |st| st.ref(); + defer if (input_store) |st| st.deref(); + + var needs_async = false; + + if (options.mkdirp_if_not_exists) |mkdir| { + if (mkdir and + path_or_blob == .blob and + path_or_blob.blob.store != null and + path_or_blob.blob.store.?.data == .file and + path_or_blob.blob.store.?.data.file.pathlike == .fd) + { + return globalThis.throwInvalidArguments("Cannot create a directory for a file descriptor", .{}); + } + } + + // If you're doing Bun.write(), try to go fast by writing short input on the main thread. + // This is a heuristic, but it's a good one. + // + // except if you're on Windows. Windows I/O is slower. Let's not even try. + if (comptime !Environment.isWindows) { + if (path_or_blob == .path or + // If they try to set an offset, its a little more complicated so let's avoid that + (path_or_blob.blob.offset == 0 and !path_or_blob.blob.isS3() and + // Is this a file that is known to be a pipe? Let's avoid blocking the main thread on it. + !(path_or_blob.blob.store != null and + path_or_blob.blob.store.?.data == .file and + path_or_blob.blob.store.?.data.file.mode != 0 and + bun.isRegularFile(path_or_blob.blob.store.?.data.file.mode)))) + { + if (data.isString()) { + const len = data.getLength(globalThis); + + if (len < 256 * 1024) { + const str = try data.toBunString(globalThis); + defer str.deref(); + + const pathlike: JSC.Node.PathOrFileDescriptor = if (path_or_blob == .path) + path_or_blob.path + else + path_or_blob.blob.store.?.data.file.pathlike; + + if (pathlike == .path) { + const result = writeStringToFileFast( + globalThis, + pathlike, + str, + &needs_async, + true, + ); + if (!needs_async) { + return result; + } + } else { + const result = writeStringToFileFast( + globalThis, + pathlike, + str, + &needs_async, + false, + ); + if (!needs_async) { + return result; + } + } + } + } else if (data.asArrayBuffer(globalThis)) |buffer_view| { + if (buffer_view.byte_len < 256 * 1024) { + const pathlike: JSC.Node.PathOrFileDescriptor = if (path_or_blob == .path) + path_or_blob.path + else + path_or_blob.blob.store.?.data.file.pathlike; + + if (pathlike == .path) { + const result = writeBytesToFileFast( + globalThis, + pathlike, + buffer_view.byteSlice(), + &needs_async, + true, + ); + + if (!needs_async) { + return result; + } + } else { + const result = writeBytesToFileFast( + globalThis, + pathlike, + buffer_view.byteSlice(), + &needs_async, + false, + ); + + if (!needs_async) { + return result; + } + } + } + } + } + } + + // if path_or_blob is a path, convert it into a file blob + var destination_blob: Blob = if (path_or_blob == .path) brk: { + const new_blob = Blob.findOrCreateFileFromPath(&path_or_blob_.path, globalThis, true); + if (new_blob.store == null) { + return globalThis.throwInvalidArguments("Writing to an empty blob is not implemented yet", .{}); + } + break :brk new_blob; + } else path_or_blob.blob.dupe(); + + if (bun.Environment.allow_assert and path_or_blob == .blob) { + // sanity check. Should never happen because + // 1. 
destination blobs passed via path_or_blob are null checked at the very start + // 2. newly created blobs from paths get null checked immediately after creation. + bun.unsafeAssert(path_or_blob.blob.store != null); + } + + // TODO: implement a writeev() fast path + var source_blob: Blob = brk: { + if (data.as(Response)) |response| { + switch (response.body.value) { + .WTFStringImpl, + .InternalBlob, + .Used, + .Empty, + .Blob, + .Null, + => { + break :brk response.body.use(); + }, + .Error => |*err_ref| { + destination_blob.detach(); + _ = response.body.value.use(); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err_ref.toJS(globalThis)); + }, + .Locked => |*locked| { + if (destination_blob.isS3()) { + const s3 = &destination_blob.store.?.data.s3; + var aws_options = try s3.getCredentialsWithOptions(options.extra_options, globalThis); + defer aws_options.deinit(); + _ = response.body.value.toReadableStream(globalThis); + if (locked.readable.get(globalThis)) |readable| { + if (readable.isDisturbed(globalThis)) { + destination_blob.detach(); + return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); + } + const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy_url = if (proxy) |p| p.href else null; + + return S3.uploadStream( + (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + s3.path(), + readable, + globalThis, + aws_options.options, + aws_options.acl, + aws_options.storage_class, + destination_blob.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); + } + destination_blob.detach(); + return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); + } + var task = bun.new(WriteFileWaitFromLockedValueTask, .{ + .globalThis = globalThis, + .file_blob = destination_blob, + .promise = JSC.JSPromise.Strong.init(globalThis), + .mkdirp_if_not_exists = options.mkdirp_if_not_exists orelse true, + }); + + response.body.value.Locked.task = task; + response.body.value.Locked.onReceiveValue = WriteFileWaitFromLockedValueTask.thenWrap; + return task.promise.value(); + }, + } + } + + if (data.as(Request)) |request| { + switch (request.body.value) { + .WTFStringImpl, + .InternalBlob, + .Used, + .Empty, + .Blob, + .Null, + => { + break :brk request.body.value.use(); + }, + .Error => |*err_ref| { + destination_blob.detach(); + _ = request.body.value.use(); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err_ref.toJS(globalThis)); + }, + .Locked => |locked| { + if (destination_blob.isS3()) { + const s3 = &destination_blob.store.?.data.s3; + var aws_options = try s3.getCredentialsWithOptions(options.extra_options, globalThis); + defer aws_options.deinit(); + _ = request.body.value.toReadableStream(globalThis); + if (locked.readable.get(globalThis)) |readable| { + if (readable.isDisturbed(globalThis)) { + destination_blob.detach(); + return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); + } + const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy_url = if (proxy) |p| p.href else null; + return S3.uploadStream( + (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + s3.path(), + readable, + globalThis, + aws_options.options, + aws_options.acl, + aws_options.storage_class, + destination_blob.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); + } + 
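+ // No ReadableStream could be extracted from the locked body, so treat it as
+ // already consumed and throw below.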
destination_blob.detach(); + return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); + } + var task = bun.new(WriteFileWaitFromLockedValueTask, .{ + .globalThis = globalThis, + .file_blob = destination_blob, + .promise = JSC.JSPromise.Strong.init(globalThis), + .mkdirp_if_not_exists = options.mkdirp_if_not_exists orelse true, + }); + + request.body.value.Locked.task = task; + request.body.value.Locked.onReceiveValue = WriteFileWaitFromLockedValueTask.thenWrap; + + return task.promise.value(); + }, + } + } + + break :brk Blob.get( + globalThis, + data, + false, + false, + ) catch |err| { + if (err == error.InvalidArguments) { + return globalThis.throwInvalidArguments("Expected an Array", .{}); + } + return globalThis.throwOutOfMemory(); + }; + }; + defer source_blob.detach(); + + const destination_store = destination_blob.store; + if (destination_store) |store| { + store.ref(); + } + + defer { + if (destination_store) |store| { + store.deref(); + } + } + + return writeFileWithSourceDestination(globalThis, &source_blob, &destination_blob, options); +} + +/// `Bun.write(destination, input, options?)` +pub fn writeFile(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const arguments = callframe.arguments(); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + // accept a path or a blob + var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); + defer { + if (path_or_blob == .path) { + path_or_blob.path.deinit(); + } + } + // "Blob" must actually be a BunFile, not a webcore blob. + if (path_or_blob == .blob) { + const store = path_or_blob.blob.store orelse { + return globalThis.throw("Cannot write to a detached Blob", .{}); + }; + if (store.data == .bytes) { + return globalThis.throwInvalidArguments("Cannot write to a Blob backed by bytes, which are always read-only", .{}); + } + } + + const data = args.nextEat() orelse { + return globalThis.throwInvalidArguments("Bun.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}); + }; + var mkdirp_if_not_exists: ?bool = null; + const options = args.nextEat(); + if (options) |options_object| { + if (options_object.isObject()) { + if (try options_object.getTruthy(globalThis, "createPath")) |create_directory| { + if (!create_directory.isBoolean()) { + return globalThis.throwInvalidArgumentType("write", "options.createPath", "boolean"); + } + mkdirp_if_not_exists = create_directory.toBoolean(); + } + } else if (!options_object.isEmptyOrUndefinedOrNull()) { + return globalThis.throwInvalidArgumentType("write", "options", "object"); + } + } + return writeFileInternal(globalThis, &path_or_blob, data, .{ + .mkdirp_if_not_exists = mkdirp_if_not_exists, + .extra_options = options, + }); +} + +const write_permissions = 0o664; + +fn writeStringToFileFast( + globalThis: *JSC.JSGlobalObject, + pathlike: JSC.Node.PathOrFileDescriptor, + str: bun.String, + needs_async: *bool, + comptime needs_open: bool, +) JSC.JSValue { + const fd: bun.FileDescriptor = if (comptime !needs_open) pathlike.fd else brk: { + var file_path: bun.PathBuffer = undefined; + switch (bun.sys.open( + pathlike.path.sliceZ(&file_path), + // we deliberately don't use O_TRUNC here + // it's a perf optimization + bun.O.WRONLY | bun.O.CREAT | bun.O.NONBLOCK, + write_permissions, + )) { + .result => |result| { + break :brk result; + }, + .err => |err| { + if (err.getErrno() == .NOENT) { + needs_async.* = true; + return .zero; + } + + return 
JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( + globalThis, + err.withPath(pathlike.path.slice()).toJSC(globalThis), + ); + }, + } + unreachable; + }; + + var truncate = needs_open or str.isEmpty(); + const jsc_vm = globalThis.bunVM(); + var written: usize = 0; + + defer { + // we only truncate if it's a path + // if it's a file descriptor, we assume they want manual control over that behavior + if (truncate) { + _ = fd.truncate(@intCast(written)); + } + if (needs_open) { + fd.close(); + } + } + if (!str.isEmpty()) { + var decoded = str.toUTF8(jsc_vm.allocator); + defer decoded.deinit(); + + var remain = decoded.slice(); + while (remain.len > 0) { + const result = bun.sys.write(fd, remain); + switch (result) { + .result => |res| { + written += res; + remain = remain[res..]; + if (res == 0) break; + }, + .err => |err| { + truncate = false; + if (err.getErrno() == .AGAIN) { + needs_async.* = true; + return .zero; + } + if (comptime !needs_open) { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + } + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( + globalThis, + err.withPath(pathlike.path.slice()).toJSC(globalThis), + ); + }, + } + } + } + + return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(written)); +} + +fn writeBytesToFileFast( + globalThis: *JSC.JSGlobalObject, + pathlike: JSC.Node.PathOrFileDescriptor, + bytes: []const u8, + needs_async: *bool, + comptime needs_open: bool, +) JSC.JSValue { + const fd: bun.FileDescriptor = if (comptime !needs_open) pathlike.fd else brk: { + var file_path: bun.PathBuffer = undefined; + switch (bun.sys.open( + pathlike.path.sliceZ(&file_path), + if (!Environment.isWindows) + // we deliberately don't use O_TRUNC here + // it's a perf optimization + bun.O.WRONLY | bun.O.CREAT | bun.O.NONBLOCK + else + bun.O.WRONLY | bun.O.CREAT, + write_permissions, + )) { + .result => |result| { + break :brk result; + }, + .err => |err| { + if (!Environment.isWindows) { + if (err.getErrno() == .NOENT) { + needs_async.* = true; + return .zero; + } + } + + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( + globalThis, + err.withPath(pathlike.path.slice()).toJSC(globalThis), + ); + }, + } + }; + + // TODO: on windows this is always synchronous + + const truncate = needs_open or bytes.len == 0; + var written: usize = 0; + defer if (needs_open) fd.close(); + + var remain = bytes; + const end = remain.ptr + remain.len; + + while (remain.ptr != end) { + const result = bun.sys.write(fd, remain); + switch (result) { + .result => |res| { + written += res; + remain = remain[res..]; + if (res == 0) break; + }, + .err => |err| { + if (!Environment.isWindows) { + if (err.getErrno() == .AGAIN) { + needs_async.* = true; + return .zero; + } + } + if (comptime !needs_open) { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( + globalThis, + err.toJSC(globalThis), + ); + } + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( + globalThis, + err.withPath(pathlike.path.slice()).toJSC(globalThis), + ); + }, + } + } + + if (truncate) { + if (Environment.isWindows) { + _ = std.os.windows.kernel32.SetEndOfFile(fd.cast()); + } else { + _ = bun.sys.ftruncate(fd, @as(i64, @intCast(written))); + } + } + + return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(written)); +} +export fn JSDOMFile__construct(globalThis: *JSC.JSGlobalObject, callframe: 
*JSC.CallFrame) callconv(JSC.conv) ?*Blob { + return JSDOMFile__construct_(globalThis, callframe) catch |err| switch (err) { + error.JSError => null, + error.OutOfMemory => { + globalThis.throwOutOfMemory() catch {}; + return null; + }, + }; +} +pub fn JSDOMFile__construct_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*Blob { + JSC.markBinding(@src()); + const allocator = bun.default_allocator; + var blob: Blob = undefined; + var arguments = callframe.arguments_old(3); + const args = arguments.slice(); + + if (args.len < 2) { + return globalThis.throwInvalidArguments("new File(bits, name) expects at least 2 arguments", .{}); + } + { + const name_value_str = try bun.String.fromJS(args[1], globalThis); + defer name_value_str.deref(); + + blob = get(globalThis, args[0], false, true) catch |err| switch (err) { + error.JSError, error.OutOfMemory => |e| return e, + error.InvalidArguments => { + return globalThis.throwInvalidArguments("new Blob() expects an Array", .{}); + }, + }; + if (blob.store) |store_| { + switch (store_.data) { + .bytes => |*bytes| { + bytes.stored_name = bun.PathString.init( + (name_value_str.toUTF8WithoutRef(bun.default_allocator).clone(bun.default_allocator) catch bun.outOfMemory()).slice(), + ); + }, + .s3, .file => { + blob.name = name_value_str.dupeRef(); + }, + } + } else if (!name_value_str.isEmpty()) { + // not store but we have a name so we need a store + blob.store = Blob.Store.new(.{ + .data = .{ + .bytes = Blob.Store.Bytes.initEmptyWithName( + bun.PathString.init( + (name_value_str.toUTF8WithoutRef(bun.default_allocator).clone(bun.default_allocator) catch bun.outOfMemory()).slice(), + ), + allocator, + ), + }, + .allocator = allocator, + .ref_count = .init(1), + }); + } + } + + var set_last_modified = false; + + if (args.len > 2) { + const options = args[2]; + if (options.isObject()) { + // type, the ASCII-encoded string in lower case + // representing the media type of the Blob. + // Normative conditions for this member are provided + // in the § 3.1 Constructors. + if (try options.get(globalThis, "type")) |content_type| { + inner: { + if (content_type.isString()) { + var content_type_str = try content_type.toSlice(globalThis, bun.default_allocator); + defer content_type_str.deinit(); + const slice = content_type_str.slice(); + if (!strings.isAllASCII(slice)) { + break :inner; + } + blob.content_type_was_set = true; + + if (globalThis.bunVM().mimeType(slice)) |mime| { + blob.content_type = mime.value; + break :inner; + } + const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + blob.content_type = strings.copyLowercase(slice, content_type_buf); + blob.content_type_allocated = true; + } + } + } + + if (try options.getTruthy(globalThis, "lastModified")) |last_modified| { + set_last_modified = true; + blob.last_modified = last_modified.coerce(f64, globalThis); + } + } + } + + if (!set_last_modified) { + // `lastModified` should be the current date in milliseconds if unspecified. + // https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified + blob.last_modified = @floatFromInt(std.time.milliTimestamp()); + } + + if (blob.content_type.len == 0) { + blob.content_type = ""; + blob.content_type_was_set = false; + } + + var blob_ = Blob.new(blob); + blob_.allocator = allocator; + blob_.is_jsdom_file = true; + return blob_; +} + +fn calculateEstimatedByteSize(this: *Blob) void { + // in-memory size. not the size on disk. 
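+ // Rough accounting, matching the sums below: the Blob struct, its Store header, the stored
+ // name/pathlike/s3 path, plus any heap-allocated content type and the name string. The result is
+ // cached in reported_estimated_size and returned by estimatedSize().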
+ var size: usize = @sizeOf(Blob); + + if (this.store) |store| { + size += @sizeOf(Blob.Store); + switch (store.data) { + .bytes => { + size += store.data.bytes.stored_name.estimatedSize(); + size += if (this.size != Blob.max_size) + this.size + else + store.data.bytes.len; + }, + .file => size += store.data.file.pathlike.estimatedSize(), + .s3 => size += store.data.s3.estimatedSize(), + } + } + + this.reported_estimated_size = size + (this.content_type.len * @intFromBool(this.content_type_allocated)) + this.name.byteSlice().len; +} + +pub fn estimatedSize(this: *Blob) usize { + return this.reported_estimated_size; +} + +comptime { + _ = JSDOMFile__hasInstance; +} + +pub fn constructBunFile( + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + var vm = globalObject.bunVM(); + const arguments = callframe.arguments_old(2).slice(); + var args = JSC.CallFrame.ArgumentsSlice.init(vm, arguments); + defer args.deinit(); + + var path = (try JSC.Node.PathOrFileDescriptor.fromJS(globalObject, &args, bun.default_allocator)) orelse { + return globalObject.throwInvalidArguments("Expected file path string or file descriptor", .{}); + }; + const options = if (arguments.len >= 2) arguments[1] else null; + + if (path == .path) { + if (strings.hasPrefixComptime(path.path.slice(), "s3://")) { + return try S3File.constructInternalJS(globalObject, path.path, options); + } + } + defer path.deinitAndUnprotect(); + + var blob = Blob.findOrCreateFileFromPath(&path, globalObject, false); + + if (options) |opts| { + if (opts.isObject()) { + if (try opts.getTruthy(globalObject, "type")) |file_type| { + inner: { + if (file_type.isString()) { + var allocator = bun.default_allocator; + var str = try file_type.toSlice(globalObject, bun.default_allocator); + defer str.deinit(); + const slice = str.slice(); + if (!strings.isAllASCII(slice)) { + break :inner; + } + blob.content_type_was_set = true; + if (vm.mimeType(str.slice())) |entry| { + blob.content_type = entry.value; + break :inner; + } + const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + blob.content_type = strings.copyLowercase(slice, content_type_buf); + blob.content_type_allocated = true; + } + } + } + if (try opts.getTruthy(globalObject, "lastModified")) |last_modified| { + blob.last_modified = last_modified.coerce(f64, globalObject); + } + } + } + + var ptr = Blob.new(blob); + ptr.allocator = bun.default_allocator; + return ptr.toJS(globalObject); +} + +pub fn findOrCreateFileFromPath(path_or_fd: *JSC.Node.PathOrFileDescriptor, globalThis: *JSGlobalObject, comptime check_s3: bool) Blob { + var vm = globalThis.bunVM(); + const allocator = bun.default_allocator; + if (check_s3) { + if (path_or_fd.* == .path) { + if (strings.startsWith(path_or_fd.path.slice(), "s3://")) { + const credentials = globalThis.bunVM().transpiler.env.getS3Credentials(); + const copy = path_or_fd.*; + path_or_fd.* = .{ .path = .{ .string = bun.PathString.empty } }; + return Blob.initWithStore(Blob.Store.initS3(copy.path, null, credentials, allocator) catch bun.outOfMemory(), globalThis); + } + } + } + const path: JSC.Node.PathOrFileDescriptor = brk: { + switch (path_or_fd.*) { + .path => { + var slice = path_or_fd.path.slice(); + + if (Environment.isWindows and bun.strings.eqlComptime(slice, "/dev/null")) { + path_or_fd.deinit(); + path_or_fd.* = .{ + .path = .{ + // this memory is freed with this allocator in `Blob.Store.deinit` + .string = bun.PathString.init(allocator.dupe(u8, "\\\\.\\NUL") catch 
bun.outOfMemory()), + }, + }; + slice = path_or_fd.path.slice(); + } + + if (vm.standalone_module_graph) |graph| { + if (graph.find(slice)) |file| { + defer { + if (path_or_fd.path != .string) { + path_or_fd.deinit(); + path_or_fd.* = .{ .path = .{ .string = bun.PathString.empty } }; + } + } + + return file.blob(globalThis).dupe(); + } + } + + path_or_fd.toThreadSafe(); + const copy = path_or_fd.*; + path_or_fd.* = .{ .path = .{ .string = bun.PathString.empty } }; + break :brk copy; + }, + .fd => { + if (path_or_fd.fd.stdioTag()) |tag| { + const store = switch (tag) { + .std_in => vm.rareData().stdin(), + .std_err => vm.rareData().stderr(), + .std_out => vm.rareData().stdout(), + }; + store.ref(); + return Blob.initWithStore(store, globalThis); + } + break :brk path_or_fd.*; + }, + } + }; + + return Blob.initWithStore(Blob.Store.initFile(path, null, allocator) catch bun.outOfMemory(), globalThis); +} + +pub fn getStream( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + const thisValue = callframe.this(); + if (js.streamGetCached(thisValue)) |cached| { + return cached; + } + var recommended_chunk_size: SizeType = 0; + var arguments_ = callframe.arguments_old(2); + var arguments = arguments_.ptr[0..arguments_.len]; + if (arguments.len > 0) { + if (!arguments[0].isNumber() and !arguments[0].isUndefinedOrNull()) { + return globalThis.throwInvalidArguments("chunkSize must be a number", .{}); + } + + recommended_chunk_size = @as(SizeType, @intCast(@max(0, @as(i52, @truncate(arguments[0].toInt64()))))); + } + const stream = JSC.WebCore.ReadableStream.fromBlob( + globalThis, + this, + recommended_chunk_size, + ); + + if (this.store) |store| { + switch (store.data) { + .file => |f| switch (f.pathlike) { + .fd => { + // in the case we have a file descriptor store, we want to de-duplicate + // readable streams. in every other case we want `.stream()` to be it's + // own stream. + js.streamSetCached(thisValue, globalThis, stream); + }, + else => {}, + }, + else => {}, + } + } + + return stream; +} + +pub fn toStreamWithOffset( + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + const this = callframe.this().as(Blob) orelse @panic("this is not a Blob"); + const args = callframe.arguments_old(1).slice(); + + return JSC.WebCore.ReadableStream.fromFileBlobWithOffset( + globalThis, + this, + @intCast(args[0].toInt64()), + ); +} + +// Zig doesn't let you pass a function with a comptime argument to a runtime-knwon function. 
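+ // `lifetimeWrap(toString, .clone)` bakes the comptime Lifetime into a plain
+ // `fn (*Blob, *JSC.JSGlobalObject) JSC.JSValue` that JSC.JSPromise.wrap can call at runtime.
+ // The wrappers below back the promise-returning Blob methods (roughly: text(), json(),
+ // arrayBuffer(), bytes(), formData()).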
+fn lifetimeWrap(comptime Fn: anytype, comptime lifetime: JSC.WebCore.Lifetime) fn (*Blob, *JSC.JSGlobalObject) JSC.JSValue { + return struct { + fn wrap(this: *Blob, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + return JSC.toJSHostValue(globalObject, Fn(this, globalObject, lifetime)); + } + }.wrap; +} + +pub fn getText( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + _: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + return this.getTextClone(globalThis); +} + +pub fn getTextClone( + this: *Blob, + globalObject: *JSC.JSGlobalObject, +) JSC.JSValue { + const store = this.store; + if (store) |st| st.ref(); + defer if (store) |st| st.deref(); + return JSC.JSPromise.wrap(globalObject, lifetimeWrap(toString, .clone), .{ this, globalObject }); +} + +pub fn getTextTransfer( + this: *Blob, + globalObject: *JSC.JSGlobalObject, +) JSC.JSValue { + const store = this.store; + if (store) |st| st.ref(); + defer if (store) |st| st.deref(); + return JSC.JSPromise.wrap(globalObject, lifetimeWrap(toString, .transfer), .{ this, globalObject }); +} + +pub fn getJSON( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + _: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + return this.getJSONShare(globalThis); +} + +pub fn getJSONShare( + this: *Blob, + globalObject: *JSC.JSGlobalObject, +) JSC.JSValue { + const store = this.store; + if (store) |st| st.ref(); + defer if (store) |st| st.deref(); + return JSC.JSPromise.wrap(globalObject, lifetimeWrap(toJSON, .share), .{ this, globalObject }); +} +pub fn getArrayBufferTransfer( + this: *Blob, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + const store = this.store; + if (store) |st| st.ref(); + defer if (store) |st| st.deref(); + + return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toArrayBuffer, .transfer), .{ this, globalThis }); +} + +pub fn getArrayBufferClone( + this: *Blob, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + const store = this.store; + if (store) |st| st.ref(); + defer if (store) |st| st.deref(); + return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toArrayBuffer, .clone), .{ this, globalThis }); +} + +pub fn getArrayBuffer( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + _: *JSC.CallFrame, +) bun.JSError!JSValue { + return this.getArrayBufferClone(globalThis); +} + +pub fn getBytesClone( + this: *Blob, + globalThis: *JSC.JSGlobalObject, +) JSValue { + const store = this.store; + if (store) |st| st.ref(); + defer if (store) |st| st.deref(); + return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toUint8Array, .clone), .{ this, globalThis }); +} + +pub fn getBytes( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + _: *JSC.CallFrame, +) bun.JSError!JSValue { + return this.getBytesClone(globalThis); +} + +pub fn getBytesTransfer( + this: *Blob, + globalThis: *JSC.JSGlobalObject, +) JSValue { + const store = this.store; + if (store) |st| st.ref(); + defer if (store) |st| st.deref(); + return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toUint8Array, .transfer), .{ this, globalThis }); +} + +pub fn getFormData( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + _: *JSC.CallFrame, +) bun.JSError!JSValue { + const store = this.store; + if (store) |st| st.ref(); + defer if (store) |st| st.deref(); + + return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toFormData, .temporary), .{ this, globalThis }); +} + +fn getExistsSync(this: *Blob) JSC.JSValue { + if (this.size == Blob.max_size) { + this.resolveSize(); + } + + // If there's no store that means it's empty and we just return true + // it will not error to return an empty Blob + const 
store = this.store orelse return JSValue.jsBoolean(true); + + if (store.data == .bytes) { + // Bytes will never error + return JSValue.jsBoolean(true); + } + + // We say regular files and pipes exist. + // This is mostly meant for "Can we use this in new Response(file)?" + return JSValue.jsBoolean( + bun.isRegularFile(store.data.file.mode) or bun.sys.S.ISFIFO(store.data.file.mode), + ); +} + +pub fn isS3(this: *const Blob) bool { + if (this.store) |store| { + return store.data == .s3; + } + return false; +} + +const S3BlobDownloadTask = struct { + blob: Blob, + globalThis: *JSC.JSGlobalObject, + promise: JSC.JSPromise.Strong, + poll_ref: bun.Async.KeepAlive = .{}, + + handler: S3ReadHandler, + pub const new = bun.TrivialNew(S3BlobDownloadTask); + pub const S3ReadHandler = *const fn (this: *Blob, globalthis: *JSGlobalObject, raw_bytes: []u8) JSValue; + + pub fn callHandler(this: *S3BlobDownloadTask, raw_bytes: []u8) JSValue { + return this.handler(&this.blob, this.globalThis, raw_bytes); + } + pub fn onS3DownloadResolved(result: S3.S3DownloadResult, this: *S3BlobDownloadTask) void { + defer this.deinit(); + switch (result) { + .success => |response| { + const bytes = response.body.list.items; + if (this.blob.size == Blob.max_size) { + this.blob.size = @truncate(bytes.len); + } + JSC.AnyPromise.wrap(.{ .normal = this.promise.get() }, this.globalThis, S3BlobDownloadTask.callHandler, .{ this, bytes }); + }, + inline .not_found, .failure => |err| { + this.promise.reject(this.globalThis, err.toJS(this.globalThis, this.blob.store.?.getPath())); + }, + } + } + + pub fn init(globalThis: *JSC.JSGlobalObject, blob: *Blob, handler: S3BlobDownloadTask.S3ReadHandler) JSValue { + blob.store.?.ref(); + + const this = S3BlobDownloadTask.new(.{ + .globalThis = globalThis, + .blob = blob.*, + .promise = JSC.JSPromise.Strong.init(globalThis), + .handler = handler, + }); + const promise = this.promise.value(); + const env = this.globalThis.bunVM().transpiler.env; + const credentials = this.blob.store.?.data.s3.getCredentials(); + const path = this.blob.store.?.data.s3.path(); + + this.poll_ref.ref(globalThis.bunVM()); + if (blob.offset > 0) { + const len: ?usize = if (blob.size != Blob.max_size) @intCast(blob.size) else null; + const offset: usize = @intCast(blob.offset); + S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + } else if (blob.size == Blob.max_size) { + S3.download(credentials, path, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + } else { + const len: usize = @intCast(blob.size); + const offset: usize = @intCast(blob.offset); + S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); + } + return promise; + } + + pub fn deinit(this: *S3BlobDownloadTask) void { + this.blob.store.?.deref(); + this.poll_ref.unref(this.globalThis.bunVM()); + this.promise.deinit(); + bun.destroy(this); + } +}; + +pub fn doWrite(this: *Blob, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(3).slice(); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + + const data = args.nextEat() orelse { + return globalThis.throwInvalidArguments("blob.write(pathOrFdOrBlob, blob) expects a Blob-y 
thing to write", .{}); + }; + if (data.isEmptyOrUndefinedOrNull()) { + return globalThis.throwInvalidArguments("blob.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}); + } + var mkdirp_if_not_exists: ?bool = null; + const options = args.nextEat(); + if (options) |options_object| { + if (options_object.isObject()) { + if (try options_object.getTruthy(globalThis, "createPath")) |create_directory| { + if (!create_directory.isBoolean()) { + return globalThis.throwInvalidArgumentType("write", "options.createPath", "boolean"); + } + mkdirp_if_not_exists = create_directory.toBoolean(); + } + if (try options_object.getTruthy(globalThis, "type")) |content_type| { + //override the content type + if (!content_type.isString()) { + return globalThis.throwInvalidArgumentType("write", "options.type", "string"); + } + var content_type_str = try content_type.toSlice(globalThis, bun.default_allocator); + defer content_type_str.deinit(); + const slice = content_type_str.slice(); + if (strings.isAllASCII(slice)) { + if (this.content_type_allocated) { + bun.default_allocator.free(this.content_type); + } + this.content_type_was_set = true; + + if (globalThis.bunVM().mimeType(slice)) |mime| { + this.content_type = mime.value; + } else { + const content_type_buf = bun.default_allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + this.content_type = strings.copyLowercase(slice, content_type_buf); + this.content_type_allocated = true; + } + } + } + } else if (!options_object.isEmptyOrUndefinedOrNull()) { + return globalThis.throwInvalidArgumentType("write", "options", "object"); + } + } + var blob_internal: PathOrBlob = .{ .blob = this.* }; + return writeFileInternal(globalThis, &blob_internal, data, .{ .mkdirp_if_not_exists = mkdirp_if_not_exists, .extra_options = options }); +} + +pub fn doUnlink(this: *Blob, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { + const arguments = callframe.arguments_old(1).slice(); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); + defer args.deinit(); + const store = this.store orelse { + return JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Blob is detached", .{})); + }; + return switch (store.data) { + .s3 => |*s3| try s3.unlink(store, globalThis, args.nextEat()), + .file => |file| file.unlink(globalThis), + else => JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Blob is read-only", .{})), + }; +} + +// This mostly means 'can it be read?' 
+pub fn getExists( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + _: *JSC.CallFrame, +) bun.JSError!JSValue { + if (this.isS3()) { + return S3File.S3BlobStatTask.exists(globalThis, this); + } + return JSC.JSPromise.resolvedPromiseValue(globalThis, this.getExistsSync()); +} + +pub const FileStreamWrapper = struct { + promise: JSC.JSPromise.Strong, + readable_stream_ref: JSC.WebCore.ReadableStream.Strong, + sink: *JSC.WebCore.FileSink, + + pub const new = bun.TrivialNew(@This()); + + pub fn deinit(this: *@This()) void { + this.promise.deinit(); + this.readable_stream_ref.deinit(); + this.sink.deref(); + bun.destroy(this); + } +}; + +pub fn onFileStreamResolveRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + var args = callframe.arguments_old(2); + var this = args.ptr[args.len - 1].asPromisePtr(FileStreamWrapper); + defer this.deinit(); + var strong = this.readable_stream_ref; + defer strong.deinit(); + this.readable_stream_ref = .{}; + if (strong.get(globalThis)) |stream| { + stream.done(globalThis); + } + this.promise.resolve(globalThis, JSC.JSValue.jsNumber(0)); + return .undefined; +} + +pub fn onFileStreamRejectRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const args = callframe.arguments_old(2); + var this = args.ptr[args.len - 1].asPromisePtr(FileStreamWrapper); + defer this.sink.deref(); + const err = args.ptr[0]; + + var strong = this.readable_stream_ref; + defer strong.deinit(); + this.readable_stream_ref = .{}; + + this.promise.reject(globalThis, err); + + if (strong.get(globalThis)) |stream| { + stream.cancel(globalThis); + } + return .undefined; +} +comptime { + const jsonResolveRequestStream = JSC.toJSHostFn(onFileStreamResolveRequestStream); + @export(&jsonResolveRequestStream, .{ .name = "Bun__FileStreamWrapper__onResolveRequestStream" }); + const jsonRejectRequestStream = JSC.toJSHostFn(onFileStreamRejectRequestStream); + @export(&jsonRejectRequestStream, .{ .name = "Bun__FileStreamWrapper__onRejectRequestStream" }); +} + +pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *JSC.JSGlobalObject, readable_stream: JSC.WebCore.ReadableStream, extra_options: ?JSValue) JSC.JSValue { + var store = this.store orelse { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, globalThis.createErrorInstance("Blob is detached", .{})); + }; + + if (this.isS3()) { + const s3 = &this.store.?.data.s3; + var aws_options = s3.getCredentialsWithOptions(extra_options, globalThis) catch |err| { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, globalThis.takeException(err)); + }; + defer aws_options.deinit(); + + const path = s3.path(); + const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy_url = if (proxy) |p| p.href else null; + + return S3.uploadStream( + (if (extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), + path, + readable_stream, + globalThis, + aws_options.options, + aws_options.acl, + aws_options.storage_class, + this.contentTypeOrMimeType(), + proxy_url, + null, + undefined, + ); + } + + if (store.data != .file) { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, globalThis.createErrorInstance("Blob is read-only", .{})); + } + + const file_sink = brk_sink: { + if (Environment.isWindows) { + const pathlike = store.data.file.pathlike; + const fd: bun.FileDescriptor = if (pathlike == .fd) 
pathlike.fd else brk: { + var file_path: bun.PathBuffer = undefined; + const path = pathlike.path.sliceZ(&file_path); + switch (bun.sys.open( + path, + bun.O.WRONLY | bun.O.CREAT | bun.O.NONBLOCK, + write_permissions, + )) { + .result => |result| { + break :brk result; + }, + .err => |err| { + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.withPath(path).toJSC(globalThis)); + }, + } + unreachable; + }; + + const is_stdout_or_stderr = brk: { + if (pathlike != .fd) { + break :brk false; + } + + if (globalThis.bunVM().rare_data) |rare| { + if (store == rare.stdout_store) { + break :brk true; + } + + if (store == rare.stderr_store) { + break :brk true; + } + } + + break :brk if (fd.stdioTag()) |tag| switch (tag) { + .std_out, .std_err => true, + else => false, + } else false; + }; + var sink = JSC.WebCore.FileSink.init(fd, this.globalThis.bunVM().eventLoop()); + sink.writer.owns_fd = pathlike != .fd; + + if (is_stdout_or_stderr) { + switch (sink.writer.startSync(fd, false)) { + .err => |err| { + sink.deref(); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + }, + else => {}, + } + } else { + switch (sink.writer.start(fd, true)) { + .err => |err| { + sink.deref(); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + }, + else => {}, + } + } + + break :brk_sink sink; + } + + var sink = JSC.WebCore.FileSink.init(bun.invalid_fd, this.globalThis.bunVM().eventLoop()); + + const input_path: JSC.WebCore.PathOrFileDescriptor = brk: { + if (store.data.file.pathlike == .fd) { + break :brk .{ .fd = store.data.file.pathlike.fd }; + } else { + break :brk .{ + .path = ZigString.Slice.fromUTF8NeverFree( + store.data.file.pathlike.path.slice(), + ).clone( + bun.default_allocator, + ) catch bun.outOfMemory(), + }; + } + }; + defer input_path.deinit(); + + const stream_start: JSC.WebCore.streams.Start = .{ + .FileSink = .{ + .input_path = input_path, + }, + }; + + switch (sink.start(stream_start)) { + .err => |err| { + sink.deref(); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); + }, + else => {}, + } + break :brk_sink sink; + }; + var signal = &file_sink.signal; + + signal.* = JSC.WebCore.FileSink.JSSink.SinkSignal.init(.zero); + + // explicitly set it to a dead pointer + // we use this memory address to disable signals being sent + signal.clear(); + bun.assert(signal.isDead()); + + const assignment_result: JSC.JSValue = JSC.WebCore.FileSink.JSSink.assignToStream( + globalThis, + readable_stream.value, + file_sink, + @as(**anyopaque, @ptrCast(&signal.ptr)), + ); + + assignment_result.ensureStillAlive(); + + // assert that it was updated + bun.assert(!signal.isDead()); + + if (assignment_result.toError()) |err| { + file_sink.deref(); + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); + } + + if (!assignment_result.isEmptyOrUndefinedOrNull()) { + globalThis.bunVM().drainMicrotasks(); + + assignment_result.ensureStillAlive(); + // it returns a Promise when it goes through ReadableStreamDefaultReader + if (assignment_result.asAnyPromise()) |promise| { + switch (promise.status(globalThis.vm())) { + .pending => { + const wrapper = FileStreamWrapper.new(.{ + .promise = JSC.JSPromise.Strong.init(globalThis), + .readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(readable_stream, globalThis), + .sink = file_sink, + }); + 
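+ // The stream was handed to the sink but hasn't settled yet; the .then() below routes
+ // resolution/rejection into onFileStreamResolveRequestStream / onFileStreamRejectRequestStream,
+ // which settle the wrapper's promise and release the sink.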
const promise_value = wrapper.promise.value(); + + assignment_result.then( + globalThis, + wrapper, + onFileStreamResolveRequestStream, + onFileStreamRejectRequestStream, + ); + return promise_value; + }, + .fulfilled => { + file_sink.deref(); + readable_stream.done(globalThis); + return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(0)); + }, + .rejected => { + file_sink.deref(); + + readable_stream.cancel(globalThis); + + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, promise.result(globalThis.vm())); + }, + } + } else { + file_sink.deref(); + + readable_stream.cancel(globalThis); + + return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, assignment_result); + } + } + file_sink.deref(); + + return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(0)); +} + +pub fn getWriter( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + var arguments_ = callframe.arguments_old(1); + var arguments = arguments_.ptr[0..arguments_.len]; + + if (!arguments.ptr[0].isEmptyOrUndefinedOrNull() and !arguments.ptr[0].isObject()) { + return globalThis.throwInvalidArguments("options must be an object or undefined", .{}); + } + + var store = this.store orelse { + return globalThis.throwInvalidArguments("Blob is detached", .{}); + }; + if (this.isS3()) { + const s3 = &this.store.?.data.s3; + const path = s3.path(); + const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy_url = if (proxy) |p| p.href else null; + if (arguments.len > 0) { + const options = arguments.ptr[0]; + if (options.isObject()) { + if (try options.getTruthy(globalThis, "type")) |content_type| { + //override the content type + if (!content_type.isString()) { + return globalThis.throwInvalidArgumentType("write", "options.type", "string"); + } + var content_type_str = try content_type.toSlice(globalThis, bun.default_allocator); + defer content_type_str.deinit(); + const slice = content_type_str.slice(); + if (strings.isAllASCII(slice)) { + if (this.content_type_allocated) { + bun.default_allocator.free(this.content_type); + } + this.content_type_was_set = true; + + if (globalThis.bunVM().mimeType(slice)) |mime| { + this.content_type = mime.value; + } else { + const content_type_buf = bun.default_allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + this.content_type = strings.copyLowercase(slice, content_type_buf); + this.content_type_allocated = true; + } + } + } + const credentialsWithOptions = try s3.getCredentialsWithOptions(options, globalThis); + return try S3.writableStream( + credentialsWithOptions.credentials.dupe(), + path, + globalThis, + credentialsWithOptions.options, + this.contentTypeOrMimeType(), + proxy_url, + credentialsWithOptions.storage_class, + ); + } + } + return try S3.writableStream( + s3.getCredentials(), + path, + globalThis, + .{}, + this.contentTypeOrMimeType(), + proxy_url, + null, + ); + } + if (store.data != .file) { + return globalThis.throwInvalidArguments("Blob is read-only", .{}); + } + + if (Environment.isWindows) { + const pathlike = store.data.file.pathlike; + const vm = globalThis.bunVM(); + const fd: bun.FileDescriptor = if (pathlike == .fd) pathlike.fd else brk: { + var file_path: bun.PathBuffer = undefined; + switch (bun.sys.open( + pathlike.path.sliceZ(&file_path), + bun.O.WRONLY | bun.O.CREAT | bun.O.NONBLOCK, + write_permissions, + )) { + .result => |result| { + break :brk result; + }, + .err => 
|err| { + return globalThis.throwValue(err.withPath(pathlike.path.slice()).toJSC(globalThis)); + }, + } + @compileError(unreachable); + }; + + const is_stdout_or_stderr = brk: { + if (pathlike != .fd) { + break :brk false; + } + + if (vm.rare_data) |rare| { + if (store == rare.stdout_store) { + break :brk true; + } + + if (store == rare.stderr_store) { + break :brk true; + } + } + + break :brk if (fd.stdioTag()) |tag| switch (tag) { + .std_out, .std_err => true, + else => false, + } else false; + }; + var sink = JSC.WebCore.FileSink.init(fd, this.globalThis.bunVM().eventLoop()); + sink.writer.owns_fd = pathlike != .fd; + + if (is_stdout_or_stderr) { + switch (sink.writer.startSync(fd, false)) { + .err => |err| { + sink.deref(); + return globalThis.throwValue(err.toJSC(globalThis)); + }, + else => {}, + } + } else { + switch (sink.writer.start(fd, true)) { + .err => |err| { + sink.deref(); + return globalThis.throwValue(err.toJSC(globalThis)); + }, + else => {}, + } + } + + return sink.toJS(globalThis); + } + + var sink = JSC.WebCore.FileSink.init(bun.invalid_fd, this.globalThis.bunVM().eventLoop()); + + const input_path: JSC.WebCore.PathOrFileDescriptor = brk: { + if (store.data.file.pathlike == .fd) { + break :brk .{ .fd = store.data.file.pathlike.fd }; + } else { + break :brk .{ + .path = ZigString.Slice.fromUTF8NeverFree( + store.data.file.pathlike.path.slice(), + ).clone( + globalThis.allocator(), + ) catch bun.outOfMemory(), + }; + } + }; + defer input_path.deinit(); + + var stream_start: bun.webcore.streams.Start = .{ + .FileSink = .{ + .input_path = input_path, + }, + }; + + if (arguments.len > 0 and arguments.ptr[0].isObject()) { + stream_start = try JSC.WebCore.streams.Start.fromJSWithTag(globalThis, arguments[0], .FileSink); + stream_start.FileSink.input_path = input_path; + } + + switch (sink.start(stream_start)) { + .err => |err| { + sink.deref(); + return globalThis.throwValue(err.toJSC(globalThis)); + }, + else => {}, + } + + return sink.toJS(globalThis); +} + +pub fn getSliceFrom(this: *Blob, globalThis: *JSC.JSGlobalObject, relativeStart: i64, relativeEnd: i64, content_type: []const u8, content_type_was_allocated: bool) JSValue { + const offset = this.offset +| @as(SizeType, @intCast(relativeStart)); + const len = @as(SizeType, @intCast(@max(relativeEnd -| relativeStart, 0))); + + // This copies over the is_all_ascii flag + // which is okay because this will only be a <= slice + var blob = this.dupe(); + blob.offset = offset; + blob.size = len; + + // infer the content type if it was not specified + if (content_type.len == 0 and this.content_type.len > 0 and !this.content_type_allocated) { + blob.content_type = this.content_type; + } else { + blob.content_type = content_type; + } + blob.content_type_allocated = content_type_was_allocated; + blob.content_type_was_set = this.content_type_was_set or content_type_was_allocated; + + var blob_ = Blob.new(blob); + blob_.allocator = bun.default_allocator; + return blob_.toJS(globalThis); +} + +/// https://w3c.github.io/FileAPI/#slice-method-algo +/// The slice() method returns a new Blob object with bytes ranging from the +/// optional start parameter up to but not including the optional end +/// parameter, and with a type attribute that is the value of the optional +/// contentType parameter. 
It must act as follows: +pub fn getSlice( + this: *Blob, + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + const allocator = bun.default_allocator; + var arguments_ = callframe.arguments_old(3); + var args = arguments_.ptr[0..arguments_.len]; + + if (this.size == 0) { + const empty = Blob.initEmpty(globalThis); + var ptr = Blob.new(empty); + ptr.allocator = allocator; + return ptr.toJS(globalThis); + } + + // If the optional start parameter is not used as a parameter when making this call, let relativeStart be 0. + var relativeStart: i64 = 0; + + // If the optional end parameter is not used as a parameter when making this call, let relativeEnd be size. + var relativeEnd: i64 = @as(i64, @intCast(this.size)); + + if (args.ptr[0].isString()) { + args.ptr[2] = args.ptr[0]; + args.ptr[1] = .zero; + args.ptr[0] = .zero; + args.len = 3; + } else if (args.ptr[1].isString()) { + args.ptr[2] = args.ptr[1]; + args.ptr[1] = .zero; + args.len = 3; + } + + var args_iter = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), args); + if (args_iter.nextEat()) |start_| { + if (start_.isNumber()) { + const start = start_.toInt64(); + if (start < 0) { + // If the optional start parameter is negative, let relativeStart be start + size. + relativeStart = @as(i64, @intCast(@max(start +% @as(i64, @intCast(this.size)), 0))); + } else { + // Otherwise, let relativeStart be start. + relativeStart = @min(@as(i64, @intCast(start)), @as(i64, @intCast(this.size))); + } + } + } + + if (args_iter.nextEat()) |end_| { + if (end_.isNumber()) { + const end = end_.toInt64(); + // If end is negative, let relativeEnd be max((size + end), 0). + if (end < 0) { + // If the optional start parameter is negative, let relativeStart be start + size. + relativeEnd = @as(i64, @intCast(@max(end +% @as(i64, @intCast(this.size)), 0))); + } else { + // Otherwise, let relativeStart be start. 
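+ // (In this branch the spec step applies to the end parameter: relativeEnd = min(end, size).)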
+ relativeEnd = @min(@as(i64, @intCast(end)), @as(i64, @intCast(this.size))); + } + } + } + + var content_type: string = ""; + var content_type_was_allocated = false; + if (args_iter.nextEat()) |content_type_| { + inner: { + if (content_type_.isString()) { + var zig_str = try content_type_.getZigString(globalThis); + var slicer = zig_str.toSlice(bun.default_allocator); + defer slicer.deinit(); + const slice = slicer.slice(); + if (!strings.isAllASCII(slice)) { + break :inner; + } + + if (globalThis.bunVM().mimeType(slice)) |mime| { + content_type = mime.value; + break :inner; + } + + content_type_was_allocated = slice.len > 0; + const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + content_type = strings.copyLowercase(slice, content_type_buf); + } + } + } + + return this.getSliceFrom(globalThis, relativeStart, relativeEnd, content_type, content_type_was_allocated); +} + +pub fn getMimeType(this: *const Blob) ?bun.http.MimeType { + if (this.store) |store| { + return store.mime_type; + } + + return null; +} + +pub fn getMimeTypeOrContentType(this: *const Blob) ?bun.http.MimeType { + if (this.content_type_was_set) { + return bun.http.MimeType.init(this.content_type, null, null); + } + + if (this.store) |store| { + return store.mime_type; + } + + return null; +} + +pub fn getType( + this: *Blob, + globalThis: *JSC.JSGlobalObject, +) JSValue { + if (this.content_type.len > 0) { + if (this.content_type_allocated) { + return ZigString.init(this.content_type).toJS(globalThis); + } + return ZigString.init(this.content_type).toJS(globalThis); + } + + if (this.store) |store| { + return ZigString.init(store.mime_type.value).toJS(globalThis); + } + + return ZigString.Empty.toJS(globalThis); +} + +pub fn getNameString(this: *Blob) ?bun.String { + if (this.name.tag != .Dead) return this.name; + + if (this.getFileName()) |path| { + this.name = bun.String.createUTF8(path); + return this.name; + } + + return null; +} + +// TODO: Move this to a separate `File` object or BunFile +pub fn getName( + this: *Blob, + _: JSC.JSValue, + globalThis: *JSC.JSGlobalObject, +) JSValue { + return if (this.getNameString()) |name| name.toJS(globalThis) else .undefined; +} + +pub fn setName( + this: *Blob, + jsThis: JSC.JSValue, + globalThis: *JSC.JSGlobalObject, + value: JSValue, + + // TODO: support JSError for getters/setters +) bool { + // by default we don't have a name so lets allow it to be set undefined + if (value.isEmptyOrUndefinedOrNull()) { + this.name.deref(); + this.name = bun.String.dead; + js.nameSetCached(jsThis, globalThis, value); + return true; + } + if (value.isString()) { + const old_name = this.name; + + this.name = bun.String.fromJS(value, globalThis) catch |err| { + switch (err) { + error.JSError => {}, + error.OutOfMemory => { + globalThis.throwOutOfMemory() catch {}; + }, + } + this.name = bun.String.empty; + return false; + }; + // We don't need to increment the reference count since tryFromJS already did it. + js.nameSetCached(jsThis, globalThis, value); + old_name.deref(); + return true; + } + return false; +} + +pub fn getFileName( + this: *const Blob, +) ?[]const u8 { + if (this.store) |store| { + if (store.data == .file) { + if (store.data.file.pathlike == .path) { + return store.data.file.pathlike.path.slice(); + } + + // we shouldn't return Number here. 
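+ // (fd-backed files fall through to the `return null` at the bottom.)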
+ } else if (store.data == .bytes) { + if (store.data.bytes.stored_name.slice().len > 0) + return store.data.bytes.stored_name.slice(); + } else if (store.data == .s3) { + return store.data.s3.path(); + } + } + + return null; +} + +pub fn getLoader(blob: *const Blob, jsc_vm: *VirtualMachine) ?bun.options.Loader { + if (blob.getFileName()) |filename| { + const current_path = bun.fs.Path.init(filename); + return current_path.loader(&jsc_vm.transpiler.options.loaders) orelse .tsx; + } else if (blob.getMimeTypeOrContentType()) |mime_type| { + return .fromMimeType(mime_type); + } else { + // Be maximally permissive. + return .tsx; + } +} + +// TODO: Move this to a separate `File` object or BunFile +pub fn getLastModified( + this: *Blob, + _: *JSC.JSGlobalObject, +) JSValue { + if (this.store) |store| { + if (store.data == .file) { + // last_modified can be already set during read. + if (store.data.file.last_modified == JSC.init_timestamp and !this.isS3()) { + resolveFileStat(store); + } + return JSValue.jsNumber(store.data.file.last_modified); + } + } + + if (this.is_jsdom_file) { + return JSValue.jsNumber(this.last_modified); + } + + return JSValue.jsNumber(JSC.init_timestamp); +} + +pub fn getSizeForBindings(this: *Blob) u64 { + if (this.size == Blob.max_size) { + this.resolveSize(); + } + + // If the file doesn't exist or is not seekable + // signal that the size is unknown. + if (this.store != null and this.store.?.data == .file and + !(this.store.?.data.file.seekable orelse false)) + { + return std.math.maxInt(u64); + } + + if (this.size == Blob.max_size) + return std.math.maxInt(u64); + + return this.size; +} + +export fn Bun__Blob__getSizeForBindings(this: *Blob) callconv(.C) u64 { + return this.getSizeForBindings(); +} + +pub fn getStat(this: *Blob, globalThis: *JSC.JSGlobalObject, callback: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const store = this.store orelse return JSC.JSValue.jsUndefined(); + // TODO: make this async for files + return switch (store.data) { + .file => |*file| { + return switch (file.pathlike) { + .path => |path_like| { + return bun.api.node.fs.Async.stat.create(globalThis, undefined, .{ + .path = .{ + .encoded_slice = switch (path_like) { + // it's already converted to utf8 + .encoded_slice => |slice| try slice.toOwned(bun.default_allocator), + else => try ZigString.init(path_like.slice()).toSliceClone(bun.default_allocator), + }, + }, + }, globalThis.bunVM()); + }, + .fd => |fd| bun.api.node.fs.Async.fstat.create(globalThis, undefined, .{ .fd = fd }, globalThis.bunVM()), + }; + }, + .s3 => S3File.getStat(this, globalThis, callback), + else => JSC.JSValue.jsUndefined(), + }; +} +pub fn getSize(this: *Blob, _: *JSC.JSGlobalObject) JSValue { + if (this.size == Blob.max_size) { + if (this.isS3()) { + return JSC.JSValue.jsNumber(std.math.nan(f64)); + } + this.resolveSize(); + if (this.size == Blob.max_size and this.store != null) { + return JSC.jsNumber(std.math.inf(f64)); + } else if (this.size == 0 and this.store != null) { + if (this.store.?.data == .file and + (this.store.?.data.file.seekable orelse true) == false and + this.store.?.data.file.max_size == Blob.max_size) + { + return JSC.jsNumber(std.math.inf(f64)); + } + } + } + + return JSValue.jsNumber(this.size); +} + +pub fn resolveSize(this: *Blob) void { + if (this.store) |store| { + if (store.data == .bytes) { + const offset = this.offset; + const store_size = store.size(); + if (store_size != Blob.max_size) { + this.offset = @min(store_size, offset); + this.size = store_size - offset; + } + + return; + } 
else if (store.data == .file) { + if (store.data.file.seekable == null) { + resolveFileStat(store); + } + + if (store.data.file.seekable != null and store.data.file.max_size != Blob.max_size) { + const store_size = store.data.file.max_size; + const offset = this.offset; + + this.offset = @min(store_size, offset); + this.size = store_size -| offset; + return; + } + } + + this.size = 0; + } else { + this.size = 0; + } +} + +/// resolve file stat like size, last_modified +fn resolveFileStat(store: *Store) void { + if (store.data.file.pathlike == .path) { + var buffer: bun.PathBuffer = undefined; + switch (bun.sys.stat(store.data.file.pathlike.path.sliceZ(&buffer))) { + .result => |stat| { + store.data.file.max_size = if (bun.isRegularFile(stat.mode) or stat.size > 0) + @truncate(@as(u64, @intCast(@max(stat.size, 0)))) + else + Blob.max_size; + store.data.file.mode = @intCast(stat.mode); + store.data.file.seekable = bun.isRegularFile(stat.mode); + store.data.file.last_modified = JSC.toJSTime(stat.mtime().sec, stat.mtime().nsec); + }, + // the file may not exist yet. Thats's okay. + else => {}, + } + } else if (store.data.file.pathlike == .fd) { + switch (bun.sys.fstat(store.data.file.pathlike.fd)) { + .result => |stat| { + store.data.file.max_size = if (bun.isRegularFile(stat.mode) or stat.size > 0) + @as(SizeType, @truncate(@as(u64, @intCast(@max(stat.size, 0))))) + else + Blob.max_size; + store.data.file.mode = @intCast(stat.mode); + store.data.file.seekable = bun.isRegularFile(stat.mode); + store.data.file.last_modified = JSC.toJSTime(stat.mtime().sec, stat.mtime().nsec); + }, + // the file may not exist yet. Thats's okay. + else => {}, + } + } +} + +pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*Blob { + const allocator = bun.default_allocator; + var blob: Blob = undefined; + var arguments = callframe.arguments_old(2); + const args = arguments.slice(); + + switch (args.len) { + 0 => { + const empty: []u8 = &[_]u8{}; + blob = Blob.init(empty, allocator, globalThis); + }, + else => { + blob = get(globalThis, args[0], false, true) catch |err| switch (err) { + error.OutOfMemory, error.JSError => |e| return e, + error.InvalidArguments => return globalThis.throwInvalidArguments("new Blob() expects an Array", .{}), + }; + + if (args.len > 1) { + const options = args[1]; + if (options.isObject()) { + // type, the ASCII-encoded string in lower case + // representing the media type of the Blob. + // Normative conditions for this member are provided + // in the § 3.1 Constructors. 
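+ // e.g. (sketch): `new Blob(["hi"], { type: "Text/Plain" })` stores the lower-cased ASCII type;
+ // a non-ASCII type string is ignored and the type stays "".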
+ if (try options.get(globalThis, "type")) |content_type| { + inner: { + if (content_type.isString()) { + var content_type_str = try content_type.toSlice(globalThis, bun.default_allocator); + defer content_type_str.deinit(); + const slice = content_type_str.slice(); + if (!strings.isAllASCII(slice)) { + break :inner; + } + blob.content_type_was_set = true; + + if (globalThis.bunVM().mimeType(slice)) |mime| { + blob.content_type = mime.value; + break :inner; + } + const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); + blob.content_type = strings.copyLowercase(slice, content_type_buf); + blob.content_type_allocated = true; + } + } + } + } + } + + if (blob.content_type.len == 0) { + blob.content_type = ""; + blob.content_type_was_set = false; + } + }, + } + + blob.calculateEstimatedByteSize(); + + var blob_ = Blob.new(blob); + blob_.allocator = allocator; + return blob_; +} + +pub fn finalize(this: *Blob) void { + this.deinit(); +} + +pub fn initWithAllASCII(bytes: []u8, allocator: std.mem.Allocator, globalThis: *JSGlobalObject, is_all_ascii: bool) Blob { + // avoid allocating a Blob.Store if the buffer is actually empty + var store: ?*Blob.Store = null; + if (bytes.len > 0) { + store = Blob.Store.init(bytes, allocator); + store.?.is_all_ascii = is_all_ascii; + } + return Blob{ + .size = @as(SizeType, @truncate(bytes.len)), + .store = store, + .allocator = null, + .content_type = "", + .globalThis = globalThis, + .is_all_ascii = is_all_ascii, + }; +} + +/// Takes ownership of `bytes`, which must have been allocated with `allocator`. +pub fn init(bytes: []u8, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) Blob { + return Blob{ + .size = @as(SizeType, @truncate(bytes.len)), + .store = if (bytes.len > 0) + Blob.Store.init(bytes, allocator) + else + null, + .allocator = null, + .content_type = "", + .globalThis = globalThis, + }; +} + +pub fn createWithBytesAndAllocator( + bytes: []u8, + allocator: std.mem.Allocator, + globalThis: *JSGlobalObject, + was_string: bool, +) Blob { + return Blob{ + .size = @as(SizeType, @truncate(bytes.len)), + .store = if (bytes.len > 0) + Blob.Store.init(bytes, allocator) + else + null, + .allocator = null, + .content_type = if (was_string) MimeType.text.value else "", + .globalThis = globalThis, + }; +} + +pub fn tryCreate( + bytes_: []const u8, + allocator_: std.mem.Allocator, + globalThis: *JSGlobalObject, + was_string: bool, +) !Blob { + if (comptime Environment.isLinux) { + if (bun.linux.memfd_allocator.shouldUse(bytes_)) { + switch (bun.linux.memfd_allocator.create(bytes_)) { + .err => {}, + .result => |result| { + const store = Store.new( + .{ + .data = .{ + .bytes = result, + }, + .allocator = bun.default_allocator, + .ref_count = std.atomic.Value(u32).init(1), + }, + ); + var blob = initWithStore(store, globalThis); + if (was_string and blob.content_type.len == 0) { + blob.content_type = MimeType.text.value; + } + + return blob; + }, + } + } + } + + return createWithBytesAndAllocator(try allocator_.dupe(u8, bytes_), allocator_, globalThis, was_string); +} + +pub fn create( + bytes_: []const u8, + allocator_: std.mem.Allocator, + globalThis: *JSGlobalObject, + was_string: bool, +) Blob { + return tryCreate(bytes_, allocator_, globalThis, was_string) catch bun.outOfMemory(); +} + +pub fn initWithStore(store: *Blob.Store, globalThis: *JSGlobalObject) Blob { + return Blob{ + .size = store.size(), + .store = store, + .allocator = null, + .content_type = if (store.data == .file) + store.data.file.mime_type.value + else + 
"", + .globalThis = globalThis, + }; +} + +pub fn initEmpty(globalThis: *JSGlobalObject) Blob { + return Blob{ + .size = 0, + .store = null, + .allocator = null, + .content_type = "", + .globalThis = globalThis, + }; +} + +// Transferring doesn't change the reference count +// It is a move +inline fn transfer(this: *Blob) void { + this.store = null; +} + +pub fn detach(this: *Blob) void { + if (this.store != null) this.store.?.deref(); + this.store = null; +} + +/// This does not duplicate +/// This creates a new view +/// and increment the reference count +pub fn dupe(this: *const Blob) Blob { + return this.dupeWithContentType(false); +} + +pub fn dupeWithContentType(this: *const Blob, include_content_type: bool) Blob { + if (this.store != null) this.store.?.ref(); + var duped = this.*; + if (duped.content_type_allocated and duped.allocator != null and !include_content_type) { + + // for now, we just want to avoid a use-after-free here + if (JSC.VirtualMachine.get().mimeType(duped.content_type)) |mime| { + duped.content_type = mime.value; + } else { + // TODO: fix this + // this is a bug. + // it means whenever + duped.content_type = ""; + } + + duped.content_type_allocated = false; + duped.content_type_was_set = false; + if (this.content_type_was_set) { + duped.content_type_was_set = duped.content_type.len > 0; + } + } else if (duped.content_type_allocated and duped.allocator != null and include_content_type) { + duped.content_type = bun.default_allocator.dupe(u8, this.content_type) catch bun.outOfMemory(); + } + duped.name = duped.name.dupeRef(); + + duped.allocator = null; + return duped; +} + +pub fn toJS(this: *Blob, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + // if (comptime Environment.allow_assert) { + // assert(this.allocator != null); + // } + this.calculateEstimatedByteSize(); + + if (this.isS3()) { + return S3File.toJSUnchecked(globalObject, this); + } + + return js.toJSUnchecked(globalObject, this); +} + +pub fn deinit(this: *Blob) void { + this.detach(); + this.name.deref(); + this.name = .dead; + + // TODO: remove this field, make it a boolean. 
+ if (this.allocator) |alloc| { + this.allocator = null; + bun.debugAssert(alloc.vtable == bun.default_allocator.vtable); + bun.destroy(this); + } +} + +pub fn sharedView(this: *const Blob) []const u8 { + if (this.size == 0 or this.store == null) return ""; + var slice_ = this.store.?.sharedView(); + if (slice_.len == 0) return ""; + slice_ = slice_[this.offset..]; + + return slice_[0..@min(slice_.len, @as(usize, this.size))]; +} + +pub const Lifetime = JSC.WebCore.Lifetime; +pub fn setIsASCIIFlag(this: *Blob, is_all_ascii: bool) void { + this.is_all_ascii = is_all_ascii; + // if this Blob represents the entire binary data + // which will be pretty common + // we can update the store's is_all_ascii flag + // and any other Blob that points to the same store + // can skip checking the encoding + if (this.size > 0 and this.offset == 0 and this.store.?.data == .bytes) { + this.store.?.is_all_ascii = is_all_ascii; + } +} + +pub fn needsToReadFile(this: *const Blob) bool { + return this.store != null and (this.store.?.data == .file); +} + +pub fn toStringWithBytes(this: *Blob, global: *JSGlobalObject, raw_bytes: []const u8, comptime lifetime: Lifetime) bun.JSError!JSValue { + const bom, const buf = strings.BOM.detectAndSplit(raw_bytes); + + if (buf.len == 0) { + // If all it contained was the BOM, we need to free the bytes + if (lifetime == .temporary) bun.default_allocator.free(raw_bytes); + return ZigString.Empty.toJS(global); + } + + if (bom == .utf16_le) { + defer if (lifetime == .temporary) bun.default_allocator.free(raw_bytes); + var out = bun.String.createUTF16(bun.reinterpretSlice(u16, buf)); + defer out.deref(); + return out.toJS(global); + } + + // null == unknown + // false == can't be + const could_be_all_ascii = this.is_all_ascii orelse this.store.?.is_all_ascii; + + if (could_be_all_ascii == null or !could_be_all_ascii.?) { + // if toUTF16Alloc returns null, it means there are no non-ASCII characters + // instead of erroring, invalid characters will become a U+FFFD replacement character + if (strings.toUTF16Alloc(bun.default_allocator, buf, false, false) catch return global.throwOutOfMemory()) |external| { + if (lifetime != .temporary) + this.setIsASCIIFlag(false); + + if (lifetime == .transfer) { + this.detach(); + } + + if (lifetime == .temporary) { + bun.default_allocator.free(raw_bytes); + } + + return ZigString.toExternalU16(external.ptr, external.len, global); + } + + if (lifetime != .temporary) this.setIsASCIIFlag(true); + } + + switch (comptime lifetime) { + // strings are immutable + // we don't need to clone + .clone => { + this.store.?.ref(); + // we don't need to worry about UTF-8 BOM in this case because the store owns the memory. + return ZigString.init(buf).external(global, this.store.?, Store.external); + }, + .transfer => { + const store = this.store.?; + assert(store.data == .bytes); + this.transfer(); + // we don't need to worry about UTF-8 BOM in this case because the store owns the memory. + return ZigString.init(buf).external(global, store, Store.external); + }, + // strings are immutable + // sharing isn't really a thing + .share => { + this.store.?.ref(); + // we don't need to worry about UTF-8 BOM in this case because the store owns the memory. + return ZigString.init(buf).external(global, this.store.?, Store.external); + }, + .temporary => { + // if there was a UTF-8 BOM, we need to clone the buffer because + // external doesn't support this case here yet.
+ if (buf.len != raw_bytes.len) { + var out = bun.String.createLatin1(buf); + defer { + bun.default_allocator.free(raw_bytes); + out.deref(); + } + + return out.toJS(global); + } + + return ZigString.init(buf).toExternalValue(global); + }, + } +} + +pub fn toStringTransfer(this: *Blob, global: *JSGlobalObject) bun.JSError!JSValue { + return this.toString(global, .transfer); +} + +pub fn toString(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) bun.JSError!JSValue { + if (this.needsToReadFile()) { + return this.doReadFile(toStringWithBytes, global); + } + if (this.isS3()) { + return this.doReadFromS3(toStringWithBytes, global); + } + + const view_: []u8 = + @constCast(this.sharedView()); + + if (view_.len == 0) + return ZigString.Empty.toJS(global); + + return toStringWithBytes(this, global, view_, lifetime); +} + +pub fn toJSON(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) bun.JSError!JSValue { + if (this.needsToReadFile()) { + return this.doReadFile(toJSONWithBytes, global); + } + if (this.isS3()) { + return this.doReadFromS3(toJSONWithBytes, global); + } + + const view_ = this.sharedView(); + + return toJSONWithBytes(this, global, view_, lifetime); +} + +pub fn toJSONWithBytes(this: *Blob, global: *JSGlobalObject, raw_bytes: []const u8, comptime lifetime: Lifetime) bun.JSError!JSValue { + const bom, const buf = strings.BOM.detectAndSplit(raw_bytes); + if (buf.len == 0) return global.createSyntaxErrorInstance("Unexpected end of JSON input", .{}); + + if (bom == .utf16_le) { + var out = bun.String.createUTF16(bun.reinterpretSlice(u16, buf)); + defer if (lifetime == .temporary) bun.default_allocator.free(raw_bytes); + defer if (lifetime == .transfer) this.detach(); + defer out.deref(); + return out.toJSByParseJSON(global); + } + // null == unknown + // false == can't be + const could_be_all_ascii = this.is_all_ascii orelse this.store.?.is_all_ascii; + defer if (comptime lifetime == .temporary) bun.default_allocator.free(@constCast(buf)); + + if (could_be_all_ascii == null or !could_be_all_ascii.?) 
{ + var stack_fallback = std.heap.stackFallback(4096, bun.default_allocator); + const allocator = stack_fallback.get(); + // if toUTF16Alloc returns null, it means there are no non-ASCII characters + if (strings.toUTF16Alloc(allocator, buf, false, false) catch null) |external| { + if (comptime lifetime != .temporary) this.setIsASCIIFlag(false); + const result = ZigString.initUTF16(external).toJSONObject(global); + allocator.free(external); + return result; + } + + if (comptime lifetime != .temporary) this.setIsASCIIFlag(true); + } + + return ZigString.init(buf).toJSONObject(global); +} + +pub fn toFormDataWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime _: Lifetime) JSValue { + var encoder = this.getFormDataEncoding() orelse return { + return ZigString.init("Invalid encoding").toErrorInstance(global); + }; + defer encoder.deinit(); + + return bun.FormData.toJS(global, buf, encoder.encoding) catch |err| + global.createErrorInstance("FormData encoding failed: {s}", .{@errorName(err)}); +} + +pub fn toArrayBufferWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime lifetime: Lifetime) bun.JSError!JSValue { + return toArrayBufferViewWithBytes(this, global, buf, lifetime, .ArrayBuffer); +} + +pub fn toUint8ArrayWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime lifetime: Lifetime) bun.JSError!JSValue { + return toArrayBufferViewWithBytes(this, global, buf, lifetime, .Uint8Array); +} + +pub fn toArrayBufferViewWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime lifetime: Lifetime, comptime TypedArrayView: JSC.JSValue.JSType) bun.JSError!JSValue { + switch (comptime lifetime) { + .clone => { + if (TypedArrayView != .ArrayBuffer) { + // ArrayBuffer doesn't have this limit. + if (buf.len > JSC.VirtualMachine.synthetic_allocation_limit) { + this.detach(); + return global.throwOutOfMemory(); + } + } + + if (comptime Environment.isLinux) { + // If we can use a copy-on-write clone of the buffer, do so. 
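+ // A bytes store that was created from a Linux memfd (see tryCreate) can be exposed
+ // to JS as a shared mapping of that fd instead of copying the bytes.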
+ if (this.store) |store| { + if (store.data == .bytes) { + const allocated_slice = store.data.bytes.allocatedSlice(); + if (bun.isSliceInBuffer(buf, allocated_slice)) { + if (bun.linux.memfd_allocator.from(store.data.bytes.allocator)) |allocator| { + allocator.ref(); + defer allocator.deref(); + + const byteOffset = @as(usize, @intFromPtr(buf.ptr)) -| @as(usize, @intFromPtr(allocated_slice.ptr)); + const byteLength = buf.len; + + const result = JSC.ArrayBuffer.toArrayBufferFromSharedMemfd( + allocator.fd.cast(), + global, + byteOffset, + byteLength, + allocated_slice.len, + TypedArrayView, + ); + debug("toArrayBuffer COW clone({d}, {d}) = {d}", .{ byteOffset, byteLength, @intFromBool(result != .zero) }); + + if (result != .zero) { + return result; + } + } + } + } + } + } + return JSC.ArrayBuffer.create(global, buf, TypedArrayView); + }, + .share => { + if (buf.len > JSC.synthetic_allocation_limit and TypedArrayView != .ArrayBuffer) { + return global.throwOutOfMemory(); + } + + this.store.?.ref(); + return JSC.ArrayBuffer.fromBytes(buf, TypedArrayView).toJSWithContext( + global, + this.store.?, + JSC.BlobArrayBuffer_deallocator, + null, + ); + }, + .transfer => { + if (buf.len > JSC.VirtualMachine.synthetic_allocation_limit and TypedArrayView != .ArrayBuffer) { + this.detach(); + return global.throwOutOfMemory(); + } + + const store = this.store.?; + this.transfer(); + return JSC.ArrayBuffer.fromBytes(buf, TypedArrayView).toJSWithContext( + global, + store, + JSC.array_buffer.BlobArrayBuffer_deallocator, + null, + ); + }, + .temporary => { + if (buf.len > JSC.VirtualMachine.synthetic_allocation_limit and TypedArrayView != .ArrayBuffer) { + bun.default_allocator.free(buf); + return global.throwOutOfMemory(); + } + + return JSC.ArrayBuffer.fromBytes(buf, TypedArrayView).toJS( + global, + null, + ); + }, + } +} + +pub fn toArrayBuffer(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) bun.JSError!JSValue { + debug("toArrayBuffer", .{}); + return toArrayBufferView(this, global, lifetime, .ArrayBuffer); +} + +pub fn toUint8Array(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) bun.JSError!JSValue { + debug("toUin8Array", .{}); + return toArrayBufferView(this, global, lifetime, .Uint8Array); +} + +pub fn toArrayBufferView(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime, comptime TypedArrayView: JSC.JSValue.JSType) bun.JSError!JSValue { + const WithBytesFn = comptime if (TypedArrayView == .Uint8Array) + toUint8ArrayWithBytes + else + toArrayBufferWithBytes; + if (this.needsToReadFile()) { + return this.doReadFile(WithBytesFn, global); + } + + if (this.isS3()) { + return this.doReadFromS3(WithBytesFn, global); + } + + const view_ = this.sharedView(); + if (view_.len == 0) + return JSC.ArrayBuffer.create(global, "", TypedArrayView); + + return WithBytesFn(this, global, @constCast(view_), lifetime); +} + +pub fn toFormData(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) JSValue { + if (this.needsToReadFile()) { + return this.doReadFile(toFormDataWithBytes, global); + } + if (this.isS3()) { + return this.doReadFromS3(toFormDataWithBytes, global); + } + + const view_ = this.sharedView(); + + if (view_.len == 0) + return JSC.DOMFormData.create(global); + + return toFormDataWithBytes(this, global, @constCast(view_), lifetime); +} + +const FromJsError = bun.JSError || error{InvalidArguments}; + +pub inline fn get( + global: *JSGlobalObject, + arg: JSValue, + comptime move: bool, + comptime require_array: bool, +) FromJsError!Blob { + 
return fromJSMovable(global, arg, move, require_array); +} + +pub inline fn fromJSMove(global: *JSGlobalObject, arg: JSValue) FromJsError!Blob { + return fromJSWithoutDeferGC(global, arg, true, false); +} + +pub inline fn fromJSClone(global: *JSGlobalObject, arg: JSValue) FromJsError!Blob { + return fromJSWithoutDeferGC(global, arg, false, true); +} + +pub inline fn fromJSCloneOptionalArray(global: *JSGlobalObject, arg: JSValue) FromJsError!Blob { + return fromJSWithoutDeferGC(global, arg, false, false); +} + +fn fromJSMovable( + global: *JSGlobalObject, + arg: JSValue, + comptime move: bool, + comptime require_array: bool, +) FromJsError!Blob { + const FromJSFunction = if (comptime move and !require_array) + fromJSMove + else if (!require_array) + fromJSCloneOptionalArray + else + fromJSClone; + + return FromJSFunction(global, arg); +} + +fn fromJSWithoutDeferGC( + global: *JSGlobalObject, + arg: JSValue, + comptime move: bool, + comptime require_array: bool, +) FromJsError!Blob { + var current = arg; + if (current.isUndefinedOrNull()) { + return Blob{ .globalThis = global }; + } + + var top_value = current; + var might_only_be_one_thing = false; + arg.ensureStillAlive(); + defer arg.ensureStillAlive(); + var fail_if_top_value_is_not_typed_array_like = false; + switch (current.jsTypeLoose()) { + .Array, .DerivedArray => { + var top_iter = JSC.JSArrayIterator.init(current, global); + might_only_be_one_thing = top_iter.len == 1; + if (top_iter.len == 0) { + return Blob{ .globalThis = global }; + } + if (might_only_be_one_thing) { + top_value = top_iter.next().?; + } + }, + else => { + might_only_be_one_thing = true; + if (require_array) { + fail_if_top_value_is_not_typed_array_like = true; + } + }, + } + + if (might_only_be_one_thing or !move) { + + // Fast path: one item, we don't need to join + switch (top_value.jsTypeLoose()) { + .Cell, + .NumberObject, + JSC.JSValue.JSType.String, + JSC.JSValue.JSType.StringObject, + JSC.JSValue.JSType.DerivedStringObject, + => { + if (!fail_if_top_value_is_not_typed_array_like) { + var str = try top_value.toBunString(global); + defer str.deref(); + const bytes, const ascii = try str.toOwnedSliceReturningAllASCII(bun.default_allocator); + return Blob.initWithAllASCII(bytes, bun.default_allocator, global, ascii); + } + }, + + JSC.JSValue.JSType.ArrayBuffer, + JSC.JSValue.JSType.Int8Array, + JSC.JSValue.JSType.Uint8Array, + JSC.JSValue.JSType.Uint8ClampedArray, + JSC.JSValue.JSType.Int16Array, + JSC.JSValue.JSType.Uint16Array, + JSC.JSValue.JSType.Int32Array, + JSC.JSValue.JSType.Uint32Array, + JSC.JSValue.JSType.Float16Array, + JSC.JSValue.JSType.Float32Array, + JSC.JSValue.JSType.Float64Array, + JSC.JSValue.JSType.BigInt64Array, + JSC.JSValue.JSType.BigUint64Array, + JSC.JSValue.JSType.DataView, + => { + return try Blob.tryCreate(top_value.asArrayBuffer(global).?.byteSlice(), bun.default_allocator, global, false); + }, + + .DOMWrapper => { + if (!fail_if_top_value_is_not_typed_array_like) { + if (top_value.as(Blob)) |blob| { + if (comptime move) { + var _blob = blob.*; + _blob.allocator = null; + blob.transfer(); + return _blob; + } else { + return blob.dupe(); + } + } else if (top_value.as(JSC.API.BuildArtifact)) |build| { + if (comptime move) { + // I don't think this case should happen? 
+ var blob = build.blob; + blob.transfer(); + return blob; + } else { + return build.blob.dupe(); + } + } else if (current.toSliceClone(global)) |sliced| { + if (sliced.allocator.get()) |allocator| { + return Blob.initWithAllASCII(@constCast(sliced.slice()), allocator, global, false); + } + } + } + }, + + else => {}, + } + + // new Blob("ok") + // new File("ok", "file.txt") + if (fail_if_top_value_is_not_typed_array_like) { + return error.InvalidArguments; + } + } + + var stack_allocator = std.heap.stackFallback(1024, bun.default_allocator); + const stack_mem_all = stack_allocator.get(); + var stack: std.ArrayList(JSValue) = std.ArrayList(JSValue).init(stack_mem_all); + var joiner = StringJoiner{ .allocator = stack_mem_all }; + var could_have_non_ascii = false; + + defer if (stack_allocator.fixed_buffer_allocator.end_index >= 1024) stack.deinit(); + + while (true) { + switch (current.jsTypeLoose()) { + .NumberObject, + JSC.JSValue.JSType.String, + JSC.JSValue.JSType.StringObject, + JSC.JSValue.JSType.DerivedStringObject, + => { + var sliced = try current.toSlice(global, bun.default_allocator); + const allocator = sliced.allocator.get(); + could_have_non_ascii = could_have_non_ascii or !sliced.allocator.isWTFAllocator(); + joiner.push(sliced.slice(), allocator); + }, + + .Array, .DerivedArray => { + var iter = JSC.JSArrayIterator.init(current, global); + try stack.ensureUnusedCapacity(iter.len); + var any_arrays = false; + while (iter.next()) |item| { + if (item.isUndefinedOrNull()) continue; + + // When it's a string or ArrayBuffer inside an array, we can avoid the extra push/pop + // we only really want this for nested arrays + // However, we must preserve the order + // That means if there are any arrays + // we have to restart the loop + if (!any_arrays) { + switch (item.jsTypeLoose()) { + .NumberObject, + .Cell, + .String, + .StringObject, + .DerivedStringObject, + => { + var sliced = try item.toSlice(global, bun.default_allocator); + const allocator = sliced.allocator.get(); + could_have_non_ascii = could_have_non_ascii or !sliced.allocator.isWTFAllocator(); + joiner.push(sliced.slice(), allocator); + continue; + }, + .ArrayBuffer, + .Int8Array, + .Uint8Array, + .Uint8ClampedArray, + .Int16Array, + .Uint16Array, + .Int32Array, + .Uint32Array, + .Float16Array, + .Float32Array, + .Float64Array, + .BigInt64Array, + .BigUint64Array, + .DataView, + => { + could_have_non_ascii = true; + var buf = item.asArrayBuffer(global).?; + joiner.pushStatic(buf.byteSlice()); + continue; + }, + .Array, .DerivedArray => { + any_arrays = true; + could_have_non_ascii = true; + break; + }, + + .DOMWrapper => { + if (item.as(Blob)) |blob| { + could_have_non_ascii = could_have_non_ascii or !(blob.is_all_ascii orelse false); + joiner.pushStatic(blob.sharedView()); + continue; + } else if (current.toSliceClone(global)) |sliced| { + const allocator = sliced.allocator.get(); + could_have_non_ascii = could_have_non_ascii or allocator != null; + joiner.push(sliced.slice(), allocator); + } + }, + else => {}, + } + } + + stack.appendAssumeCapacity(item); + } + }, + + .DOMWrapper => { + if (current.as(Blob)) |blob| { + could_have_non_ascii = could_have_non_ascii or !(blob.is_all_ascii orelse false); + joiner.pushStatic(blob.sharedView()); + } else if (current.toSliceClone(global)) |sliced| { + const allocator = sliced.allocator.get(); + could_have_non_ascii = could_have_non_ascii or allocator != null; + joiner.push(sliced.slice(), allocator); + } + }, + + .ArrayBuffer, + .Int8Array, + .Uint8Array, + 
.Uint8ClampedArray, + .Int16Array, + .Uint16Array, + .Int32Array, + .Uint32Array, + .Float16Array, + .Float32Array, + .Float64Array, + .BigInt64Array, + .BigUint64Array, + .DataView, + => { + var buf = current.asArrayBuffer(global).?; + joiner.pushStatic(buf.slice()); + could_have_non_ascii = true; + }, + + else => { + var sliced = try current.toSlice(global, bun.default_allocator); + if (global.hasException()) { + const end_result = try joiner.done(bun.default_allocator); + bun.default_allocator.free(end_result); + return error.JSError; + } + could_have_non_ascii = could_have_non_ascii or !sliced.allocator.isWTFAllocator(); + joiner.push(sliced.slice(), sliced.allocator.get()); + }, + } + current = stack.pop() orelse break; + } + + const joined = try joiner.done(bun.default_allocator); + + if (!could_have_non_ascii) { + return Blob.initWithAllASCII(joined, bun.default_allocator, global, true); + } + return Blob.init(joined, bun.default_allocator, global); +} + +pub const Any = union(enum) { + Blob: Blob, + InternalBlob: Internal, + WTFStringImpl: bun.WTF.StringImpl, + + pub fn fromOwnedSlice(allocator: std.mem.Allocator, bytes: []u8) Any { + return .{ .InternalBlob = .{ .bytes = .fromOwnedSlice(allocator, bytes) } }; + } + + pub fn fromArrayList(list: std.ArrayList(u8)) Any { + return .{ .InternalBlob = .{ .bytes = list } }; + } + + /// Assumed that AnyBlob itself is covered by the caller. + pub fn memoryCost(this: *const Any) usize { + return switch (this.*) { + .Blob => |*blob| if (blob.store) |blob_store| blob_store.memoryCost() else 0, + .WTFStringImpl => |str| if (str.refCount() == 1) str.memoryCost() else 0, + .InternalBlob => |*internal_blob| internal_blob.memoryCost(), + }; + } + + pub fn hasOneRef(this: *const Any) bool { + if (this.store()) |s| { + return s.hasOneRef(); + } + + return false; + } + + pub fn getFileName(this: *const Any) ?[]const u8 { + return switch (this.*) { + .Blob => this.Blob.getFileName(), + .WTFStringImpl => null, + .InternalBlob => null, + }; + } + + pub inline fn fastSize(this: *const Any) Blob.SizeType { + return switch (this.*) { + .Blob => this.Blob.size, + .WTFStringImpl => @truncate(this.WTFStringImpl.byteLength()), + .InternalBlob => @truncate(this.slice().len), + }; + } + + pub inline fn size(this: *const Any) Blob.SizeType { + return switch (this.*) { + .Blob => this.Blob.size, + .WTFStringImpl => @truncate(this.WTFStringImpl.utf8ByteLength()), + else => @truncate(this.slice().len), + }; + } + + pub fn hasContentTypeFromUser(this: Any) bool { + return switch (this) { + .Blob => this.Blob.hasContentTypeFromUser(), + .WTFStringImpl => false, + .InternalBlob => false, + }; + } + + fn toInternalBlobIfPossible(this: *Any) void { + if (this.* == .Blob) { + if (this.Blob.store) |s| { + if (s.data == .bytes and s.hasOneRef()) { + this.* = .{ .InternalBlob = s.data.bytes.toInternalBlob() }; + s.deref(); + return; + } + } + } + } + + pub fn toActionValue(this: *Any, globalThis: *JSGlobalObject, action: streams.BufferAction.Tag) bun.JSError!JSC.JSValue { + if (action != .blob) { + this.toInternalBlobIfPossible(); + } + + switch (action) { + .text => { + if (this.* == .Blob) { + return this.toString(globalThis, .clone); + } + + return this.toStringTransfer(globalThis); + }, + .bytes => { + if (this.* == .Blob) { + return this.toArrayBufferView(globalThis, .clone, .Uint8Array); + } + + return this.toUint8ArrayTransfer(globalThis); + }, + .blob => { + const result = Blob.new(this.toBlob(globalThis)); + result.allocator = bun.default_allocator; + 
result.globalThis = globalThis; + return result.toJS(globalThis); + }, + .arrayBuffer => { + if (this.* == .Blob) { + return this.toArrayBufferView(globalThis, .clone, .ArrayBuffer); + } + + return this.toArrayBufferTransfer(globalThis); + }, + .json => { + return this.toJSON(globalThis, .share); + }, + } + } + + pub fn toPromise(this: *Any, globalThis: *JSGlobalObject, action: streams.BufferAction.Tag) JSC.JSValue { + return JSC.JSPromise.wrap(globalThis, toActionValue, .{ this, globalThis, action }); + } + + pub fn wrap(this: *Any, promise: JSC.AnyPromise, globalThis: *JSGlobalObject, action: streams.BufferAction.Tag) void { + promise.wrap(globalThis, toActionValue, .{ this, globalThis, action }); + } + + pub fn toJSON(this: *Any, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime) bun.JSError!JSValue { + switch (this.*) { + .Blob => return this.Blob.toJSON(global, lifetime), + // .InlineBlob => { + // if (this.InlineBlob.len == 0) { + // return JSValue.jsNull(); + // } + // var str = this.InlineBlob.toStringOwned(global); + // return str.parseJSON(global); + // }, + .InternalBlob => { + if (this.InternalBlob.bytes.items.len == 0) { + return JSValue.jsNull(); + } + + const str = this.InternalBlob.toJSON(global); + + // the GC will collect the string + this.* = .{ + .Blob = .{}, + }; + + return str; + }, + .WTFStringImpl => { + var str = bun.String.init(this.WTFStringImpl); + defer str.deref(); + this.* = .{ + .Blob = .{}, + }; + + if (str.length() == 0) { + return JSValue.jsNull(); + } + + return str.toJSByParseJSON(global); + }, + } + } + + pub fn toJSONShare(this: *Any, global: *JSGlobalObject) bun.JSError!JSValue { + return this.toJSON(global, .share); + } + + pub fn toStringTransfer(this: *Any, global: *JSGlobalObject) bun.JSError!JSValue { + return this.toString(global, .transfer); + } + + pub fn toUint8ArrayTransfer(this: *Any, global: *JSGlobalObject) bun.JSError!JSValue { + return this.toUint8Array(global, .transfer); + } + + pub fn toArrayBufferTransfer(this: *Any, global: *JSGlobalObject) bun.JSError!JSValue { + return this.toArrayBuffer(global, .transfer); + } + + pub fn toBlob(this: *Any, global: *JSGlobalObject) Blob { + if (this.size() == 0) { + return Blob.initEmpty(global); + } + + if (this.* == .Blob) { + return this.Blob.dupe(); + } + + if (this.* == .WTFStringImpl) { + const blob = Blob.create(this.slice(), bun.default_allocator, global, true); + this.* = .{ .Blob = .{} }; + return blob; + } + + const blob = Blob.init(this.InternalBlob.slice(), this.InternalBlob.bytes.allocator, global); + this.* = .{ .Blob = .{} }; + return blob; + } + + pub fn toString(this: *Any, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime) bun.JSError!JSValue { + switch (this.*) { + .Blob => return this.Blob.toString(global, lifetime), + // .InlineBlob => { + // if (this.InlineBlob.len == 0) { + // return ZigString.Empty.toValue(global); + // } + // const owned = this.InlineBlob.toStringOwned(global); + // this.* = .{ .InlineBlob = .{ .len = 0 } }; + // return owned; + // }, + .InternalBlob => { + if (this.InternalBlob.bytes.items.len == 0) { + return ZigString.Empty.toJS(global); + } + + const owned = this.InternalBlob.toStringOwned(global); + this.* = .{ .Blob = .{} }; + return owned; + }, + .WTFStringImpl => { + var str = bun.String.init(this.WTFStringImpl); + defer str.deref(); + this.* = .{ .Blob = .{} }; + + return str.toJS(global); + }, + } + } + + pub fn toArrayBuffer(this: *Any, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime) 
bun.JSError!JSValue { + return this.toArrayBufferView(global, lifetime, .ArrayBuffer); + } + + pub fn toUint8Array(this: *Any, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime) bun.JSError!JSValue { + return this.toArrayBufferView(global, lifetime, .Uint8Array); + } + + pub fn toArrayBufferView(this: *Any, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime, comptime TypedArrayView: JSC.JSValue.JSType) bun.JSError!JSValue { + switch (this.*) { + .Blob => return this.Blob.toArrayBufferView(global, lifetime, TypedArrayView), + // .InlineBlob => { + // if (this.InlineBlob.len == 0) { + // return JSC.ArrayBuffer.create(global, "", .ArrayBuffer); + // } + // var bytes = this.InlineBlob.sliceConst(); + // this.InlineBlob.len = 0; + // const value = JSC.ArrayBuffer.create( + // global, + // bytes, + // .ArrayBuffer, + // ); + // return value; + // }, + .InternalBlob => { + if (this.InternalBlob.bytes.items.len == 0) { + return JSC.ArrayBuffer.create(global, "", TypedArrayView); + } + + const bytes = this.InternalBlob.toOwnedSlice(); + this.* = .{ .Blob = .{} }; + + return JSC.ArrayBuffer.fromDefaultAllocator( + global, + bytes, + TypedArrayView, + ); + }, + .WTFStringImpl => { + const str = bun.String.init(this.WTFStringImpl); + this.* = .{ .Blob = .{} }; + defer str.deref(); + + const out_bytes = str.toUTF8WithoutRef(bun.default_allocator); + if (out_bytes.isAllocated()) { + return JSC.ArrayBuffer.fromDefaultAllocator( + global, + @constCast(out_bytes.slice()), + TypedArrayView, + ); + } + + return JSC.ArrayBuffer.create(global, out_bytes.slice(), TypedArrayView); + }, + } + } + + pub fn isDetached(this: *const Any) bool { + return switch (this.*) { + .Blob => |blob| blob.isDetached(), + .InternalBlob => this.InternalBlob.bytes.items.len == 0, + .WTFStringImpl => this.WTFStringImpl.length() == 0, + }; + } + + pub fn store(this: *const @This()) ?*Blob.Store { + if (this.* == .Blob) { + return this.Blob.store; + } + + return null; + } + + pub fn contentType(self: *const @This()) []const u8 { + return switch (self.*) { + .Blob => self.Blob.content_type, + .WTFStringImpl => MimeType.text.value, + // .InlineBlob => self.InlineBlob.contentType(), + .InternalBlob => self.InternalBlob.contentType(), + }; + } + + pub fn wasString(self: *const @This()) bool { + return switch (self.*) { + .Blob => self.Blob.is_all_ascii orelse false, + .WTFStringImpl => true, + // .InlineBlob => self.InlineBlob.was_string, + .InternalBlob => self.InternalBlob.was_string, + }; + } + + pub inline fn slice(self: *const @This()) []const u8 { + return switch (self.*) { + .Blob => self.Blob.sharedView(), + .WTFStringImpl => self.WTFStringImpl.utf8Slice(), + // .InlineBlob => self.InlineBlob.sliceConst(), + .InternalBlob => self.InternalBlob.sliceConst(), + }; + } + + pub fn needsToReadFile(self: *const @This()) bool { + return switch (self.*) { + .Blob => self.Blob.needsToReadFile(), + .WTFStringImpl, .InternalBlob => false, + }; + } + + pub fn isS3(self: *const @This()) bool { + return switch (self.*) { + .Blob => self.Blob.isS3(), + .WTFStringImpl, .InternalBlob => false, + }; + } + + pub fn detach(self: *@This()) void { + return switch (self.*) { + .Blob => { + self.Blob.detach(); + self.* = .{ + .Blob = .{}, + }; + }, + // .InlineBlob => { + // self.InlineBlob.len = 0; + // }, + .InternalBlob => { + self.InternalBlob.bytes.clearAndFree(); + self.* = .{ .Blob = .{} }; + }, + .WTFStringImpl => { + self.WTFStringImpl.deref(); + self.* = .{ .Blob = .{} }; + }, + }; + } +}; + +/// A single-use Blob 
backed by an allocation of memory. +pub const Internal = struct { + bytes: std.ArrayList(u8), + was_string: bool = false, + + pub fn memoryCost(this: *const @This()) usize { + return this.bytes.capacity; + } + + pub fn toStringOwned(this: *@This(), globalThis: *JSC.JSGlobalObject) JSValue { + const bytes_without_bom = strings.withoutUTF8BOM(this.bytes.items); + if (strings.toUTF16Alloc(globalThis.allocator(), bytes_without_bom, false, false) catch &[_]u16{}) |out| { + const return_value = ZigString.toExternalU16(out.ptr, out.len, globalThis); + return_value.ensureStillAlive(); + this.deinit(); + return return_value; + } else if + // If there was a UTF8 BOM, we clone it + (bytes_without_bom.len != this.bytes.items.len) { + defer this.deinit(); + var out = bun.String.createLatin1(this.bytes.items[3..]); + defer out.deref(); + return out.toJS(globalThis); + } else { + var str = ZigString.init(this.toOwnedSlice()); + str.mark(); + return str.toExternalValue(globalThis); + } + } + + pub fn toJSON(this: *@This(), globalThis: *JSC.JSGlobalObject) JSValue { + const str_bytes = ZigString.init(strings.withoutUTF8BOM(this.bytes.items)).withEncoding(); + const json = str_bytes.toJSONObject(globalThis); + this.deinit(); + return json; + } + + pub inline fn sliceConst(this: *const @This()) []const u8 { + return this.bytes.items; + } + + pub fn deinit(this: *@This()) void { + this.bytes.clearAndFree(); + } + + pub inline fn slice(this: @This()) []u8 { + return this.bytes.items; + } + + pub fn toOwnedSlice(this: *@This()) []u8 { + const bytes = this.bytes.items; + this.bytes.items = &.{}; + this.bytes.capacity = 0; + return bytes; + } + + pub fn clearAndFree(this: *@This()) void { + this.bytes.clearAndFree(); + } + + pub fn contentType(self: *const @This()) []const u8 { + if (self.was_string) { + return MimeType.text.value; + } + + return MimeType.other.value; + } +}; + +/// A blob which stores all the data in the same space as a real Blob +/// This is an optimization for small Response and Request bodies +/// It means that we can avoid an additional heap allocation for a small response +pub const Inline = extern struct { + const real_blob_size = @sizeOf(Blob); + pub const IntSize = u8; + pub const available_bytes = real_blob_size - @sizeOf(IntSize) - 1 - 1; + bytes: [available_bytes]u8 align(1) = undefined, + len: IntSize align(1) = 0, + was_string: bool align(1) = false, + + pub fn concat(first: []const u8, second: []const u8) Inline { + const total = first.len + second.len; + assert(total <= available_bytes); + + var inline_blob: JSC.WebCore.InlineBlob = .{}; + var bytes_slice = inline_blob.bytes[0..total]; + + if (first.len > 0) + @memcpy(bytes_slice[0..first.len], first); + + if (second.len > 0) + @memcpy(bytes_slice[first.len..][0..second.len], second); + + inline_blob.len = @as(@TypeOf(inline_blob.len), @truncate(total)); + return inline_blob; + } + + fn internalInit(data: []const u8, was_string: bool) Inline { + assert(data.len <= available_bytes); + + var blob = Inline{ + .len = @as(IntSize, @intCast(data.len)), + .was_string = was_string, + }; + + if (data.len > 0) + @memcpy(blob.bytes[0..data.len], data); + return blob; + } + + pub fn init(data: []const u8) Inline { + return internalInit(data, false); + } + + pub fn initString(data: []const u8) Inline { + return internalInit(data, true); + } + + pub fn toStringOwned(this: *@This(), globalThis: *JSC.JSGlobalObject) JSValue { + if (this.len == 0) + return ZigString.Empty.toJS(globalThis); + + var str = ZigString.init(this.sliceConst()); + + if 
(!strings.isAllASCII(this.sliceConst())) { + str.markUTF8(); + } + + const out = str.toJS(globalThis); + out.ensureStillAlive(); + this.len = 0; + return out; + } + + pub fn contentType(self: *const @This()) []const u8 { + if (self.was_string) { + return MimeType.text.value; + } + + return MimeType.other.value; + } + + pub fn deinit(_: *@This()) void {} + + pub inline fn slice(this: *@This()) []u8 { + return this.bytes[0..this.len]; + } + + pub inline fn sliceConst(this: *const @This()) []const u8 { + return this.bytes[0..this.len]; + } + + pub fn toOwnedSlice(this: *@This()) []u8 { + return this.slice(); + } + + pub fn clearAndFree(_: *@This()) void {} +}; + +pub export fn JSDOMFile__hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool { + JSC.markBinding(@src()); + const blob = value.as(Blob) orelse return false; + return blob.is_jsdom_file; +} + +// TODO: move to bun.sys? +pub fn FileOpener(comptime This: type) type { + return struct { + context: *This, + + const State = @This(); + + const __opener_flags = bun.O.NONBLOCK | bun.O.CLOEXEC; + + const open_flags_ = if (@hasDecl(This, "open_flags")) + This.open_flags | __opener_flags + else + bun.O.RDONLY | __opener_flags; + + fn getFdByOpening(this: *This, comptime Callback: OpenCallback) void { + var buf: bun.PathBuffer = undefined; + var path_string = if (@hasField(This, "file_store")) + this.file_store.pathlike.path + else + this.file_blob.store.?.data.file.pathlike.path; + + const path = path_string.sliceZ(&buf); + + if (Environment.isWindows) { + const WrappedCallback = struct { + pub fn callback(req: *libuv.fs_t) callconv(.C) void { + var self: *This = @alignCast(@ptrCast(req.data.?)); + { + defer req.deinit(); + if (req.result.errEnum()) |errEnum| { + var path_string_2 = if (@hasField(This, "file_store")) + self.file_store.pathlike.path + else + self.file_blob.store.?.data.file.pathlike.path; + self.errno = bun.errnoToZigErr(errEnum); + self.system_error = bun.sys.Error.fromCode(errEnum, .open) + .withPath(path_string_2.slice()) + .toSystemError(); + self.opened_fd = invalid_fd; + } else { + self.opened_fd = req.result.toFD(); + } + } + Callback(self, self.opened_fd); + } + }; + + const rc = libuv.uv_fs_open( + this.loop, + &this.req, + path, + open_flags_, + JSC.Node.fs.default_permission, + &WrappedCallback.callback, + ); + if (rc.errEnum()) |errno| { + this.errno = bun.errnoToZigErr(errno); + this.system_error = bun.sys.Error.fromCode(errno, .open).withPath(path_string.slice()).toSystemError(); + this.opened_fd = invalid_fd; + Callback(this, invalid_fd); + } + this.req.data = @ptrCast(this); + return; + } + + while (true) { + this.opened_fd = switch (bun.sys.open(path, open_flags_, JSC.Node.fs.default_permission)) { + .result => |fd| fd, + .err => |err| { + if (comptime @hasField(This, "mkdirp_if_not_exists")) { + if (err.errno == @intFromEnum(bun.sys.E.NOENT)) { + switch (mkdirIfNotExists(this, err, path, path_string.slice())) { + .@"continue" => continue, + .fail => { + this.opened_fd = invalid_fd; + break; + }, + .no => {}, + } + } + } + + this.errno = bun.errnoToZigErr(err.errno); + this.system_error = err.withPath(path_string.slice()).toSystemError(); + this.opened_fd = invalid_fd; + break; + }, + }; + break; + } + + Callback(this, this.opened_fd); + } + + const OpenCallback = *const fn (*This, bun.FileDescriptor) void; + + pub fn getFd(this: *This, comptime Callback: OpenCallback) void { + if (this.opened_fd != invalid_fd) { + Callback(this, this.opened_fd); + return; + } + + if 
(@hasField(This, "file_store")) { + const pathlike = this.file_store.pathlike; + if (pathlike == .fd) { + this.opened_fd = pathlike.fd; + Callback(this, this.opened_fd); + return; + } + } else { + const pathlike = this.file_blob.store.?.data.file.pathlike; + if (pathlike == .fd) { + this.opened_fd = pathlike.fd; + Callback(this, this.opened_fd); + return; + } + } + + getFdByOpening(this, Callback); + } + }; +} + +// TODO: move to bun.sys? +pub fn FileCloser(comptime This: type) type { + return struct { + fn scheduleClose(request: *io.Request) io.Action { + var this: *This = @alignCast(@fieldParentPtr("io_request", request)); + return io.Action{ + .close = .{ + .ctx = this, + .fd = this.opened_fd, + .onDone = @ptrCast(&onIORequestClosed), + .poll = &this.io_poll, + .tag = This.io_tag, + }, + }; + } + + fn onIORequestClosed(this: *This) void { + this.io_poll.flags.remove(.was_ever_registered); + this.task = .{ .callback = &onCloseIORequest }; + bun.JSC.WorkPool.schedule(&this.task); + } + + fn onCloseIORequest(task: *JSC.WorkPoolTask) void { + debug("onCloseIORequest()", .{}); + var this: *This = @alignCast(@fieldParentPtr("task", task)); + this.close_after_io = false; + this.update(); + } + + pub fn doClose(this: *This, is_allowed_to_close_fd: bool) bool { + if (@hasField(This, "io_request")) { + if (this.close_after_io) { + this.state.store(ClosingState.closing, .seq_cst); + + @atomicStore(@TypeOf(this.io_request.callback), &this.io_request.callback, &scheduleClose, .seq_cst); + if (!this.io_request.scheduled) + io.Loop.get().schedule(&this.io_request); + return true; + } + } + + if (is_allowed_to_close_fd and + this.opened_fd != invalid_fd and + this.opened_fd.stdioTag() == null) + { + if (comptime Environment.isWindows) { + bun.Async.Closer.close(this.opened_fd, this.loop); + } else { + _ = this.opened_fd.closeAllowingBadFileDescriptor(null); + } + this.opened_fd = invalid_fd; + } + + return false; + } + }; +} + +const std = @import("std"); +const Api = @import("../../api/schema.zig").Api; +const bun = @import("bun"); +const MimeType = http.MimeType; +const ZigURL = @import("../../url.zig").URL; +const http = bun.http; +const JSC = bun.JSC; +const io = bun.io; +const Method = @import("../../http/method.zig").Method; +const FetchHeaders = bun.webcore.FetchHeaders; +const ObjectPool = @import("../../pool.zig").ObjectPool; +const SystemError = JSC.SystemError; +const Output = bun.Output; +const MutableString = bun.MutableString; +const strings = bun.strings; +const string = bun.string; +const default_allocator = bun.default_allocator; +const FeatureFlags = bun.FeatureFlags; +const JSError = bun.JSError; +const assert = bun.assert; +const streams = bun.webcore.streams; + +const Environment = @import("../../env.zig"); +const ZigString = JSC.ZigString; +const IdentityContext = @import("../../identity_context.zig").IdentityContext; +const JSPromise = JSC.JSPromise; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const NullableAllocator = bun.NullableAllocator; + +const VirtualMachine = JSC.VirtualMachine; +const Task = JSC.Task; +const JSPrinter = bun.js_printer; +const picohttp = bun.picohttp; +const StringJoiner = bun.StringJoiner; +const uws = bun.uws; + +const invalid_fd = bun.invalid_fd; +const Response = JSC.WebCore.Response; +const Body = JSC.WebCore.Body; +const Request = JSC.WebCore.Request; + +const libuv = bun.windows.libuv; + +const S3 = bun.S3; +const S3File = @import("S3File.zig"); +const S3Credentials = S3.S3Credentials; +const PathOrBlob = 
JSC.Node.PathOrBlob; +const PathLike = JSC.Node.PathLike; +const WriteFilePromise = write_file.WriteFilePromise; +const WriteFileWaitFromLockedValueTask = write_file.WriteFileWaitFromLockedValueTask; +const NewReadFileHandler = read_file.NewReadFileHandler; diff --git a/src/bun.js/webcore/Body.zig b/src/bun.js/webcore/Body.zig new file mode 100644 index 0000000000..0f4944576f --- /dev/null +++ b/src/bun.js/webcore/Body.zig @@ -0,0 +1,1736 @@ +//! https://developer.mozilla.org/en-US/docs/Web/API/Body +const Body = @This(); + +value: Value, // = Value.empty, + +pub inline fn len(this: *const Body) Blob.SizeType { + return this.value.size(); +} + +pub fn slice(this: *const Body) []const u8 { + return this.value.slice(); +} + +pub fn use(this: *Body) Blob { + return this.value.use(); +} + +pub fn clone(this: *Body, globalThis: *JSGlobalObject) Body { + return Body{ + .value = this.value.clone(globalThis), + }; +} + +pub fn writeFormat(this: *Body, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + const Writer = @TypeOf(writer); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("bodyUsed: ", enable_ansi_colors)); + try formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.value == .Used), .BooleanObject, enable_ansi_colors); + + if (this.value == .Blob) { + try formatter.printComma(Writer, writer, enable_ansi_colors); + try writer.writeAll("\n"); + try formatter.writeIndent(Writer, writer); + try this.value.Blob.writeFormat(Formatter, formatter, writer, enable_ansi_colors); + } else if (this.value == .InternalBlob or this.value == .WTFStringImpl) { + try formatter.printComma(Writer, writer, enable_ansi_colors); + try writer.writeAll("\n"); + try formatter.writeIndent(Writer, writer); + try Blob.writeFormatForSize(false, this.value.size(), writer, enable_ansi_colors); + } else if (this.value == .Locked) { + if (this.value.Locked.readable.get(this.value.Locked.global)) |stream| { + try formatter.printComma(Writer, writer, enable_ansi_colors); + try writer.writeAll("\n"); + try formatter.writeIndent(Writer, writer); + try formatter.printAs(.Object, Writer, writer, stream.value, stream.value.jsType(), enable_ansi_colors); + } + } +} + +pub fn deinit(this: *Body, _: std.mem.Allocator) void { + this.value.deinit(); +} + +pub const PendingValue = struct { + promise: ?JSValue = null, + readable: JSC.WebCore.ReadableStream.Strong = .{}, + // writable: JSC.WebCore.Sink + + global: *JSGlobalObject, + task: ?*anyopaque = null, + + /// runs after the data is available. 
+ onReceiveValue: ?*const fn (ctx: *anyopaque, value: *Value) void = null, + + /// conditionally runs when requesting data + /// used in HTTP server to ignore request bodies unless asked for it + onStartBuffering: ?*const fn (ctx: *anyopaque) void = null, + onStartStreaming: ?*const fn (ctx: *anyopaque) JSC.WebCore.DrainResult = null, + onReadableStreamAvailable: ?*const fn (ctx: *anyopaque, globalThis: *JSC.JSGlobalObject, readable: JSC.WebCore.ReadableStream) void = null, + size_hint: Blob.SizeType = 0, + + deinit: bool = false, + action: Action = Action{ .none = {} }, + + /// For Http Client requests + /// when Content-Length is provided this represents the whole size of the request + /// If chunked encoded this will represent the total received size (ignoring the chunk headers) + /// If the size is unknown will be 0 + fn sizeHint(this: *const PendingValue) Blob.SizeType { + if (this.readable.get(this.global)) |readable| { + if (readable.ptr == .Bytes) { + return readable.ptr.Bytes.size_hint; + } + } + return this.size_hint; + } + + pub fn toAnyBlob(this: *PendingValue) ?AnyBlob { + if (this.promise != null) + return null; + + return this.toAnyBlobAllowPromise(); + } + + pub fn isDisturbed(this: *const PendingValue, comptime T: type, globalObject: *JSC.JSGlobalObject, this_value: JSC.JSValue) bool { + if (this.promise != null) { + return true; + } + + if (T.js.bodyGetCached(this_value)) |body_value| { + if (JSC.WebCore.ReadableStream.isDisturbedValue(body_value, globalObject)) { + return true; + } + + return false; + } + + if (this.readable.get(globalObject)) |readable| { + return readable.isDisturbed(globalObject); + } + + return false; + } + + pub fn isDisturbed2(this: *const PendingValue, globalObject: *JSC.JSGlobalObject) bool { + if (this.promise != null) { + return true; + } + + if (this.readable.get(globalObject)) |readable| { + return readable.isDisturbed(globalObject); + } + + return false; + } + pub fn isStreamingOrBuffering(this: *PendingValue) bool { + return this.readable.held.has() or (this.promise != null and !this.promise.?.isEmptyOrUndefinedOrNull()); + } + + pub fn hasPendingPromise(this: *PendingValue) bool { + const promise = this.promise orelse return false; + + if (promise.asAnyPromise()) |internal| { + if (internal.status(this.global.vm()) != .pending) { + promise.unprotect(); + this.promise = null; + return false; + } + + return true; + } + + this.promise = null; + return false; + } + + pub fn toAnyBlobAllowPromise(this: *PendingValue) ?AnyBlob { + var stream = if (this.readable.get(this.global)) |readable| readable else return null; + + if (stream.toAnyBlob(this.global)) |blob| { + this.readable.deinit(); + return blob; + } + + return null; + } + + pub fn setPromise(value: *PendingValue, globalThis: *JSC.JSGlobalObject, action: Action) JSValue { + value.action = action; + if (value.readable.get(globalThis)) |readable| { + switch (action) { + .getFormData, .getText, .getJSON, .getBlob, .getArrayBuffer, .getBytes => { + const promise = switch (action) { + .getJSON => globalThis.readableStreamToJSON(readable.value), + .getArrayBuffer => globalThis.readableStreamToArrayBuffer(readable.value), + .getBytes => globalThis.readableStreamToBytes(readable.value), + .getText => globalThis.readableStreamToText(readable.value), + .getBlob => globalThis.readableStreamToBlob(readable.value), + .getFormData => |form_data| brk: { + defer { + form_data.?.deinit(); + value.action.getFormData = null; + } + + break :brk globalThis.readableStreamToFormData(readable.value, switch 
(form_data.?.encoding) { + .Multipart => |multipart| bun.String.init(multipart).toJS(globalThis), + .URLEncoded => .undefined, + }); + }, + else => unreachable, + }; + value.readable.deinit(); + // The ReadableStream within is expected to keep this Promise alive. + // If you try to protect() this, it will leak memory because the other end of the ReadableStream won't call it. + // See https://github.com/oven-sh/bun/issues/13678 + return promise; + }, + + .none => {}, + } + } + + { + var promise = JSC.JSPromise.create(globalThis); + const promise_value = promise.asValue(globalThis); + value.promise = promise_value; + promise_value.protect(); + + if (value.onStartBuffering) |onStartBuffering| { + value.onStartBuffering = null; + onStartBuffering(value.task.?); + } + return promise_value; + } + } + + pub const Action = union(enum) { + none: void, + getText: void, + getJSON: void, + getArrayBuffer: void, + getBytes: void, + getBlob: void, + getFormData: ?*bun.FormData.AsyncFormData, + }; +}; + +/// This is a duplex stream! +pub const Value = union(Tag) { + const log = Output.scoped(.BodyValue, false); + + const pool_size = if (bun.heap_breakdown.enabled) 0 else 256; + pub const HiveRef = bun.HiveRef(JSC.WebCore.Body.Value, pool_size); + pub const HiveAllocator = bun.HiveArray(HiveRef, pool_size).Fallback; + + Blob: Blob, + + /// This is the String type from WebKit + /// It is reference counted, so we must always deref it (which this does automatically) + /// Be careful where it can directly be used. + /// + /// If it is a latin1 string with only ascii, we can use it directly. + /// Otherwise, we must convert it to utf8. + /// + /// Unless we are sending it directly to JavaScript, for example: + /// + /// var str = "hello world 🤭" + /// var response = new Response(str); + /// /* Body.Value stays WTFStringImpl */ + /// var body = await response.text(); + /// + /// In this case, even though there's an emoji, we can use the StringImpl directly. + /// BUT, if we were instead using it in the HTTP server, this cannot be used directly. + /// + /// When the server calls .toBlobIfPossible(), we will automatically + /// convert this Value to an InternalBlob + /// + /// Example code: + /// + /// Bun.serve({ + /// fetch(req) { + /// /* Body.Value becomes InternalBlob */ + /// return new Response("hello world 🤭"); + /// } + /// }) + /// + /// This works for .json(), too. + WTFStringImpl: bun.WTF.StringImpl, + /// Single-use Blob + /// Avoids a heap allocation. + InternalBlob: InternalBlob, + /// Single-use Blob that stores the bytes in the Value itself. + // InlineBlob: InlineBlob, + Locked: PendingValue, + Used, + Empty, + Error: ValueError, + Null, + + // We may not have all the data yet + // So we can't know for sure if it's empty or not + // We CAN know that it is definitely empty. 
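+ // A Locked value may still receive data and an Error value has no usable length,
+ // so neither is ever reported as definitely empty.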
+ pub fn isDefinitelyEmpty(this: *const Value) bool { + return switch (this.*) { + .Null => true, + .Used, .Empty => true, + .InternalBlob => this.InternalBlob.slice().len == 0, + .Blob => this.Blob.size == 0, + .WTFStringImpl => this.WTFStringImpl.length() == 0, + .Error, .Locked => false, + }; + } + + pub const heap_breakdown_label = "BodyValue"; + pub const ValueError = union(enum) { + AbortReason: JSC.CommonAbortReason, + SystemError: JSC.SystemError, + Message: bun.String, + JSValue: JSC.Strong, + + pub fn toStreamError(this: *@This(), globalObject: *JSC.JSGlobalObject) streams.Result.StreamError { + return switch (this.*) { + .AbortReason => .{ + .AbortReason = this.AbortReason, + }, + else => .{ + .JSValue = this.toJS(globalObject), + }, + }; + } + + pub fn toJS(this: *@This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { + const js_value = switch (this.*) { + .AbortReason => |reason| reason.toJS(globalObject), + .SystemError => |system_error| system_error.toErrorInstance(globalObject), + .Message => |message| message.toErrorInstance(globalObject), + // do a early return in this case we don't need to create a new Strong + .JSValue => |js_value| return js_value.get() orelse JSC.JSValue.jsUndefined(), + }; + this.* = .{ .JSValue = JSC.Strong.create(js_value, globalObject) }; + return js_value; + } + + pub fn dupe(this: *const @This(), globalObject: *JSC.JSGlobalObject) @This() { + var value = this.*; + switch (this.*) { + .SystemError => value.SystemError.ref(), + .Message => value.Message.ref(), + .JSValue => |js_ref| { + if (js_ref.get()) |js_value| { + return .{ .JSValue = JSC.Strong.create(js_value, globalObject) }; + } + return .{ .JSValue = .empty }; + }, + .AbortReason => {}, + } + return value; + } + + pub fn deinit(this: *@This()) void { + switch (this.*) { + .SystemError => |system_error| system_error.deref(), + .Message => |message| message.deref(), + .JSValue => this.JSValue.deinit(), + .AbortReason => {}, + } + // safe empty value after deinit + this.* = .{ .JSValue = .empty }; + } + }; + pub fn toBlobIfPossible(this: *Value) void { + if (this.* == .WTFStringImpl) { + if (this.WTFStringImpl.toUTF8IfNeeded(bun.default_allocator)) |bytes| { + var str = this.WTFStringImpl; + defer str.deref(); + this.* = .{ + .InternalBlob = InternalBlob{ + .bytes = std.ArrayList(u8).fromOwnedSlice(bun.default_allocator, @constCast(bytes.slice())), + .was_string = true, + }, + }; + } + } + + if (this.* != .Locked) + return; + + if (this.Locked.toAnyBlob()) |blob| { + this.* = switch (blob) { + .Blob => .{ .Blob = blob.Blob }, + .InternalBlob => .{ .InternalBlob = blob.InternalBlob }, + .WTFStringImpl => .{ .WTFStringImpl = blob.WTFStringImpl }, + // .InlineBlob => .{ .InlineBlob = blob.InlineBlob }, + }; + } + } + + pub fn size(this: *const Value) Blob.SizeType { + return switch (this.*) { + .Blob => this.Blob.size, + .InternalBlob => @as(Blob.SizeType, @truncate(this.InternalBlob.sliceConst().len)), + .WTFStringImpl => @as(Blob.SizeType, @truncate(this.WTFStringImpl.utf8ByteLength())), + .Locked => this.Locked.sizeHint(), + // .InlineBlob => @truncate(Blob.SizeType, this.InlineBlob.sliceConst().len), + else => 0, + }; + } + + pub fn fastSize(this: *const Value) Blob.SizeType { + return switch (this.*) { + .InternalBlob => @as(Blob.SizeType, @truncate(this.InternalBlob.sliceConst().len)), + .WTFStringImpl => @as(Blob.SizeType, @truncate(this.WTFStringImpl.byteSlice().len)), + .Locked => this.Locked.sizeHint(), + // .InlineBlob => @truncate(Blob.SizeType, 
this.InlineBlob.sliceConst().len), + else => 0, + }; + } + + pub fn memoryCost(this: *const Value) usize { + return switch (this.*) { + .InternalBlob => this.InternalBlob.bytes.items.len, + .WTFStringImpl => this.WTFStringImpl.memoryCost(), + .Locked => this.Locked.sizeHint(), + // .InlineBlob => this.InlineBlob.sliceConst().len, + else => 0, + }; + } + + pub fn estimatedSize(this: *const Value) usize { + return switch (this.*) { + .InternalBlob => this.InternalBlob.sliceConst().len, + .WTFStringImpl => this.WTFStringImpl.byteSlice().len, + .Locked => this.Locked.sizeHint(), + // .InlineBlob => this.InlineBlob.sliceConst().len, + else => 0, + }; + } + + pub fn createBlobValue(data: []u8, allocator: std.mem.Allocator, was_string: bool) Value { + // if (data.len <= InlineBlob.available_bytes) { + // var _blob = InlineBlob{ + // .bytes = undefined, + // .was_string = was_string, + // .len = @truncate(InlineBlob.IntSize, data.len), + // }; + // @memcpy(&_blob.bytes, data.ptr, data.len); + // allocator.free(data); + // return Value{ + // .InlineBlob = _blob, + // }; + // } + + return Value{ + .InternalBlob = InternalBlob{ + .bytes = std.ArrayList(u8).fromOwnedSlice(allocator, data), + .was_string = was_string, + }, + }; + } + + pub const Tag = enum { + Blob, + WTFStringImpl, + InternalBlob, + // InlineBlob, + Locked, + Used, + Empty, + Error, + Null, + }; + + // pub const empty = Value{ .Empty = {} }; + + pub fn toReadableStream(this: *Value, globalThis: *JSGlobalObject) JSValue { + JSC.markBinding(@src()); + + switch (this.*) { + .Used => { + return JSC.WebCore.ReadableStream.used(globalThis); + }, + .Empty => { + return JSC.WebCore.ReadableStream.empty(globalThis); + }, + .Null => { + return JSValue.null; + }, + .InternalBlob, .Blob, .WTFStringImpl => { + var blob = this.use(); + defer blob.detach(); + blob.resolveSize(); + const value = JSC.WebCore.ReadableStream.fromBlob(globalThis, &blob, blob.size); + + this.* = .{ + .Locked = .{ + .readable = JSC.WebCore.ReadableStream.Strong.init(JSC.WebCore.ReadableStream.fromJS(value, globalThis).?, globalThis), + .global = globalThis, + }, + }; + return value; + }, + .Locked => { + var locked = &this.Locked; + if (locked.readable.get(globalThis)) |readable| { + return readable.value; + } + if (locked.promise != null or locked.action != .none) { + return JSC.WebCore.ReadableStream.used(globalThis); + } + var drain_result: JSC.WebCore.DrainResult = .{ + .estimated_size = 0, + }; + + if (locked.onStartStreaming) |drain| { + locked.onStartStreaming = null; + drain_result = drain(locked.task.?); + } + + if (drain_result == .empty or drain_result == .aborted) { + this.* = .{ .Null = {} }; + return JSC.WebCore.ReadableStream.empty(globalThis); + } + + var reader = JSC.WebCore.ByteStream.Source.new(.{ + .context = undefined, + .globalThis = globalThis, + }); + + reader.context.setup(); + + if (drain_result == .estimated_size) { + reader.context.highWaterMark = @as(Blob.SizeType, @truncate(drain_result.estimated_size)); + reader.context.size_hint = @as(Blob.SizeType, @truncate(drain_result.estimated_size)); + } else if (drain_result == .owned) { + reader.context.buffer = drain_result.owned.list; + reader.context.size_hint = @as(Blob.SizeType, @truncate(drain_result.owned.size_hint)); + } + + locked.readable = JSC.WebCore.ReadableStream.Strong.init(.{ + .ptr = .{ .Bytes = &reader.context }, + .value = reader.toReadableStream(globalThis), + }, globalThis); + + if (locked.onReadableStreamAvailable) |onReadableStreamAvailable| { + 
onReadableStreamAvailable(locked.task.?, globalThis, locked.readable.get(globalThis).?); + } + + return locked.readable.get(globalThis).?.value; + }, + .Error => { + // TODO: handle error properly + return JSC.WebCore.ReadableStream.empty(globalThis); + }, + } + } + + pub fn fromJS(globalThis: *JSGlobalObject, value: JSValue) bun.JSError!Value { + value.ensureStillAlive(); + + if (value.isEmptyOrUndefinedOrNull()) { + return Body.Value{ + .Null = {}, + }; + } + + const js_type = value.jsType(); + + if (js_type.isStringLike()) { + var str = try value.toBunString(globalThis); + if (str.length() == 0) { + return Body.Value{ + .Empty = {}, + }; + } + + assert(str.tag == .WTFStringImpl); + + return Body.Value{ + .WTFStringImpl = str.value.WTFStringImpl, + }; + } + + if (js_type.isTypedArrayOrArrayBuffer()) { + if (value.asArrayBuffer(globalThis)) |buffer| { + const bytes = buffer.byteSlice(); + + if (bytes.len == 0) { + return Body.Value{ + .Empty = {}, + }; + } + + // if (bytes.len <= InlineBlob.available_bytes) { + // return Body.Value{ + // .InlineBlob = InlineBlob.init(bytes), + // }; + // } + + return Body.Value{ + .InternalBlob = .{ + .bytes = std.ArrayList(u8){ + .items = bun.default_allocator.dupe(u8, bytes) catch { + return globalThis.throwValue(ZigString.static("Failed to clone ArrayBufferView").toErrorInstance(globalThis)); + }, + .capacity = bytes.len, + .allocator = bun.default_allocator, + }, + .was_string = false, + }, + }; + } + } + + if (value.as(JSC.DOMFormData)) |form_data| { + return Body.Value{ + .Blob = Blob.fromDOMFormData(globalThis, bun.default_allocator, form_data), + }; + } + + if (value.as(JSC.URLSearchParams)) |search_params| { + return Body.Value{ + .Blob = Blob.fromURLSearchParams(globalThis, bun.default_allocator, search_params), + }; + } + + if (js_type == .DOMWrapper) { + if (value.as(Blob)) |blob| { + return Body.Value{ + .Blob = blob.dupe(), + }; + } + } + + value.ensureStillAlive(); + + if (JSC.WebCore.ReadableStream.fromJS(value, globalThis)) |readable| { + if (readable.isDisturbed(globalThis)) { + return globalThis.throw("ReadableStream has already been used", .{}); + } + + switch (readable.ptr) { + .Blob => |blob| { + const store = blob.detachStore() orelse { + return Body.Value{ .Blob = Blob.initEmpty(globalThis) }; + }; + + readable.forceDetach(globalThis); + + const result: Value = .{ + .Blob = Blob.initWithStore(store, globalThis), + }; + + return result; + }, + else => {}, + } + + return Body.Value.fromReadableStreamWithoutLockCheck(readable, globalThis); + } + + return Body.Value{ + .Blob = Blob.get(globalThis, value, true, false) catch |err| { + if (!globalThis.hasException()) { + if (err == error.InvalidArguments) { + return globalThis.throwInvalidArguments("Expected an Array", .{}); + } + + return globalThis.throwInvalidArguments("Invalid Body object", .{}); + } + + return error.JSError; + }, + }; + } + + pub fn fromReadableStreamWithoutLockCheck(readable: JSC.WebCore.ReadableStream, globalThis: *JSGlobalObject) Value { + return .{ + .Locked = .{ + .readable = JSC.WebCore.ReadableStream.Strong.init(readable, globalThis), + .global = globalThis, + }, + }; + } + + pub fn resolve( + to_resolve: *Value, + new: *Value, + global: *JSGlobalObject, + headers: ?*FetchHeaders, + ) void { + log("resolve", .{}); + if (to_resolve.* == .Locked) { + var locked = &to_resolve.Locked; + + if (locked.readable.get(global)) |readable| { + readable.done(global); + locked.readable.deinit(); + } + + if (locked.onReceiveValue) |callback| { + locked.onReceiveValue = null; 
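+ // A registered onReceiveValue callback (for example, a ValueBufferer waiting on this
+ // body) takes the resolved value directly; the promise path below is skipped entirely.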
+ callback(locked.task.?, new); + return; + } + + if (locked.promise) |promise_| { + const promise = promise_.asAnyPromise().?; + locked.promise = null; + + switch (locked.action) { + // These ones must use promise.wrap() to handle exceptions thrown while calling .toJS() on the value. + // These exceptions can happen if the String is too long, ArrayBuffer is too large, JSON parse error, etc. + .getText => { + switch (new.*) { + .WTFStringImpl, + .InternalBlob, + // .InlineBlob, + => { + var blob = new.useAsAnyBlobAllowNonUTF8String(); + promise.wrap(global, AnyBlob.toStringTransfer, .{ &blob, global }); + }, + else => { + var blob = new.use(); + promise.wrap(global, Blob.toStringTransfer, .{ &blob, global }); + }, + } + }, + .getJSON => { + var blob = new.useAsAnyBlobAllowNonUTF8String(); + promise.wrap(global, AnyBlob.toJSONShare, .{ &blob, global }); + blob.detach(); + }, + .getArrayBuffer => { + var blob = new.useAsAnyBlobAllowNonUTF8String(); + promise.wrap(global, AnyBlob.toArrayBufferTransfer, .{ &blob, global }); + }, + .getBytes => { + var blob = new.useAsAnyBlobAllowNonUTF8String(); + promise.wrap(global, AnyBlob.toUint8ArrayTransfer, .{ &blob, global }); + }, + + .getFormData => inner: { + var blob = new.useAsAnyBlob(); + defer blob.detach(); + var async_form_data: *bun.FormData.AsyncFormData = locked.action.getFormData orelse { + promise.reject(global, ZigString.init("Internal error: task for FormData must not be null").toErrorInstance(global)); + break :inner; + }; + defer async_form_data.deinit(); + async_form_data.toJS(global, blob.slice(), promise); + }, + .none, .getBlob => { + var blob = Blob.new(new.use()); + blob.allocator = bun.default_allocator; + if (headers) |fetch_headers| { + if (fetch_headers.fastGet(.ContentType)) |content_type| { + var content_slice = content_type.toSlice(bun.default_allocator); + defer content_slice.deinit(); + var allocated = false; + const mimeType = MimeType.init(content_slice.slice(), bun.default_allocator, &allocated); + blob.content_type = mimeType.value; + blob.content_type_allocated = allocated; + blob.content_type_was_set = true; + if (blob.store != null) { + blob.store.?.mime_type = mimeType; + } + } + } + if (!blob.content_type_was_set and blob.store != null) { + blob.content_type = MimeType.text.value; + blob.content_type_allocated = false; + blob.content_type_was_set = true; + blob.store.?.mime_type = MimeType.text; + } + promise.resolve(global, blob.toJS(global)); + }, + } + JSC.C.JSValueUnprotect(global, promise_.asObjectRef()); + } + } + } + pub fn slice(this: *const Value) []const u8 { + return switch (this.*) { + .Blob => this.Blob.sharedView(), + .InternalBlob => this.InternalBlob.sliceConst(), + .WTFStringImpl => if (this.WTFStringImpl.canUseAsUTF8()) this.WTFStringImpl.latin1Slice() else "", + // .InlineBlob => this.InlineBlob.sliceConst(), + else => "", + }; + } + + pub fn use(this: *Value) Blob { + this.toBlobIfPossible(); + + switch (this.*) { + .Blob => { + const new_blob = this.Blob; + assert(new_blob.allocator == null); // owned by Body + this.* = .{ .Used = {} }; + return new_blob; + }, + .InternalBlob => { + const new_blob = Blob.init( + this.InternalBlob.toOwnedSlice(), + // we will never resize it from here + // we have to use the default allocator + // even if it was actually allocated on a different thread + bun.default_allocator, + JSC.VirtualMachine.get().global, + ); + + this.* = .{ .Used = {} }; + return new_blob; + }, + .WTFStringImpl => { + var new_blob: Blob = undefined; + var wtf = this.WTFStringImpl; + 
defer wtf.deref(); + if (wtf.toUTF8IfNeeded(bun.default_allocator)) |allocated_slice| { + new_blob = Blob.init( + @constCast(allocated_slice.slice()), + bun.default_allocator, + JSC.VirtualMachine.get().global, + ); + } else { + new_blob = Blob.init( + bun.default_allocator.dupe(u8, wtf.latin1Slice()) catch bun.outOfMemory(), + bun.default_allocator, + JSC.VirtualMachine.get().global, + ); + } + + this.* = .{ .Used = {} }; + return new_blob; + }, + // .InlineBlob => { + // const cloned = this.InlineBlob.bytes; + // // keep same behavior as InternalBlob but clone the data + // const new_blob = Blob.create( + // cloned[0..this.InlineBlob.len], + // bun.default_allocator, + // JSC.VirtualMachine.get().global, + // false, + // ); + + // this.* = .{ .Used = {} }; + // return new_blob; + // }, + else => { + return Blob.initEmpty(undefined); + }, + } + } + + pub fn tryUseAsAnyBlob(this: *Value) ?AnyBlob { + if (this.* == .WTFStringImpl) { + if (this.WTFStringImpl.canUseAsUTF8()) { + return AnyBlob{ .WTFStringImpl = this.WTFStringImpl }; + } + } + + const any_blob: AnyBlob = switch (this.*) { + .Blob => AnyBlob{ .Blob = this.Blob }, + .InternalBlob => AnyBlob{ .InternalBlob = this.InternalBlob }, + // .InlineBlob => AnyBlob{ .InlineBlob = this.InlineBlob }, + .Locked => this.Locked.toAnyBlobAllowPromise() orelse return null, + else => return null, + }; + + this.* = .{ .Used = {} }; + return any_blob; + } + + pub fn useAsAnyBlob(this: *Value) AnyBlob { + const any_blob: AnyBlob = switch (this.*) { + .Blob => .{ .Blob = this.Blob }, + .InternalBlob => .{ .InternalBlob = this.InternalBlob }, + .WTFStringImpl => |str| brk: { + if (str.toUTF8IfNeeded(bun.default_allocator)) |utf8| { + defer str.deref(); + break :brk .{ + .InternalBlob = InternalBlob{ + .bytes = std.ArrayList(u8).fromOwnedSlice(bun.default_allocator, @constCast(utf8.slice())), + .was_string = true, + }, + }; + } else { + break :brk .{ + .WTFStringImpl = str, + }; + } + }, + // .InlineBlob => .{ .InlineBlob = this.InlineBlob }, + .Locked => this.Locked.toAnyBlobAllowPromise() orelse AnyBlob{ .Blob = .{} }, + else => .{ .Blob = Blob.initEmpty(undefined) }, + }; + + this.* = if (this.* == .Null) + .{ .Null = {} } + else + .{ .Used = {} }; + return any_blob; + } + + pub fn useAsAnyBlobAllowNonUTF8String(this: *Value) AnyBlob { + const any_blob: AnyBlob = switch (this.*) { + .Blob => .{ .Blob = this.Blob }, + .InternalBlob => .{ .InternalBlob = this.InternalBlob }, + .WTFStringImpl => .{ .WTFStringImpl = this.WTFStringImpl }, + // .InlineBlob => .{ .InlineBlob = this.InlineBlob }, + .Locked => this.Locked.toAnyBlobAllowPromise() orelse AnyBlob{ .Blob = .{} }, + else => .{ .Blob = Blob.initEmpty(undefined) }, + }; + + this.* = if (this.* == .Null) + .{ .Null = {} } + else + .{ .Used = {} }; + return any_blob; + } + + pub fn toErrorInstance(this: *Value, err: ValueError, global: *JSGlobalObject) void { + if (this.* == .Locked) { + var locked = this.Locked; + this.* = .{ .Error = err }; + + var strong_readable = locked.readable; + locked.readable = .{}; + defer strong_readable.deinit(); + + if (locked.hasPendingPromise()) { + const promise = locked.promise.?; + defer promise.unprotect(); + locked.promise = null; + + if (promise.asAnyPromise()) |internal| { + internal.reject(global, this.Error.toJS(global)); + } + } + + // The Promise version goes before the ReadableStream version incase the Promise version is used too. + // Avoid creating unnecessary duplicate JSValue. 
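+ // Any ReadableStream still attached is then notified below: a Bytes-backed stream
+ // receives the error through onData(), anything else is simply aborted.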
+ if (strong_readable.get(global)) |readable| { + if (readable.ptr == .Bytes) { + readable.ptr.Bytes.onData( + .{ .err = this.Error.toStreamError(global) }, + bun.default_allocator, + ); + } else { + readable.abort(global); + } + } + + if (locked.onReceiveValue) |onReceiveValue| { + locked.onReceiveValue = null; + onReceiveValue(locked.task.?, this); + } + return; + } + this.* = .{ .Error = err }; + } + + pub fn toError(this: *Value, err: anyerror, global: *JSGlobalObject) void { + return this.toErrorInstance(.{ .Message = bun.String.createFormat( + "Error reading file {s}", + .{@errorName(err)}, + ) catch bun.outOfMemory() }, global); + } + + pub fn deinit(this: *Value) void { + const tag = @as(Tag, this.*); + if (tag == .Locked) { + if (!this.Locked.deinit) { + this.Locked.deinit = true; + this.Locked.readable.deinit(); + this.Locked.readable = .{}; + } + + return; + } + + if (tag == .InternalBlob) { + this.InternalBlob.clearAndFree(); + this.* = Value{ .Null = {} }; + } + + if (tag == .Blob) { + this.Blob.deinit(); + this.* = Value{ .Null = {} }; + } + + if (tag == .WTFStringImpl) { + this.WTFStringImpl.deref(); + this.* = Value{ .Null = {} }; + } + + if (tag == .Error) { + this.Error.deinit(); + } + } + + pub fn tee(this: *Value, globalThis: *JSC.JSGlobalObject) Value { + var locked = &this.Locked; + + if (locked.readable.isDisturbed(globalThis)) { + return Value{ .Used = {} }; + } + + if (locked.readable.tee(globalThis)) |readable| { + return Value{ + .Locked = .{ + .readable = JSC.WebCore.ReadableStream.Strong.init(readable, globalThis), + .global = globalThis, + }, + }; + } + if (locked.promise != null or locked.action != .none or locked.readable.has()) { + return Value{ .Used = {} }; + } + + var drain_result: JSC.WebCore.DrainResult = .{ + .estimated_size = 0, + }; + + if (locked.onStartStreaming) |drain| { + locked.onStartStreaming = null; + drain_result = drain(locked.task.?); + } + + if (drain_result == .empty or drain_result == .aborted) { + this.* = .{ .Null = {} }; + return Value{ .Null = {} }; + } + + var reader = JSC.WebCore.ByteStream.Source.new(.{ + .context = undefined, + .globalThis = globalThis, + }); + + reader.context.setup(); + + if (drain_result == .estimated_size) { + reader.context.highWaterMark = @as(Blob.SizeType, @truncate(drain_result.estimated_size)); + reader.context.size_hint = @as(Blob.SizeType, @truncate(drain_result.estimated_size)); + } else if (drain_result == .owned) { + reader.context.buffer = drain_result.owned.list; + reader.context.size_hint = @as(Blob.SizeType, @truncate(drain_result.owned.size_hint)); + } + + locked.readable = JSC.WebCore.ReadableStream.Strong.init(.{ + .ptr = .{ .Bytes = &reader.context }, + .value = reader.toReadableStream(globalThis), + }, globalThis); + + if (locked.onReadableStreamAvailable) |onReadableStreamAvailable| { + onReadableStreamAvailable(locked.task.?, globalThis, locked.readable.get(globalThis).?); + } + + const teed = locked.readable.tee(globalThis) orelse return Value{ .Used = {} }; + + return Value{ + .Locked = .{ + .readable = JSC.WebCore.ReadableStream.Strong.init(teed, globalThis), + .global = globalThis, + }, + }; + } + + pub fn clone(this: *Value, globalThis: *JSC.JSGlobalObject) Value { + this.toBlobIfPossible(); + + if (this.* == .Locked) { + return this.tee(globalThis); + } + + if (this.* == .InternalBlob) { + var internal_blob = this.InternalBlob; + this.* = .{ + .Blob = Blob.init( + internal_blob.toOwnedSlice(), + internal_blob.bytes.allocator, + globalThis, + ), + }; + } + + // if (this.* == 
.InlineBlob) { + // return this.*; + // } + + if (this.* == .Blob) { + return Value{ .Blob = this.Blob.dupe() }; + } + + if (this.* == .WTFStringImpl) { + this.WTFStringImpl.ref(); + return Value{ .WTFStringImpl = this.WTFStringImpl }; + } + + if (this.* == .Null) { + return Value{ .Null = {} }; + } + + return Value{ .Empty = {} }; + } +}; + +// https://github.com/WebKit/webkit/blob/main/Source/WebCore/Modules/fetch/FetchBody.cpp#L45 +pub fn extract( + globalThis: *JSGlobalObject, + value: JSValue, +) bun.JSError!Body { + var body = Body{ .value = Value{ .Null = {} } }; + + body.value = try Value.fromJS(globalThis, value); + if (body.value == .Blob) { + assert(body.value.Blob.allocator == null); // owned by Body + } + return body; +} + +pub fn Mixin(comptime Type: type) type { + return struct { + pub fn getText( + this: *Type, + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, + ) bun.JSError!JSC.JSValue { + var value: *Body.Value = this.getBodyValue(); + if (value.* == .Used) { + return handleBodyAlreadyUsed(globalObject); + } + + if (value.* == .Locked) { + if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { + return handleBodyAlreadyUsed(globalObject); + } + + return value.Locked.setPromise(globalObject, .{ .getText = {} }); + } + + var blob = value.useAsAnyBlobAllowNonUTF8String(); + return JSC.JSPromise.wrap(globalObject, lifetimeWrap(AnyBlob.toString, .transfer), .{ &blob, globalObject }); + } + + pub fn getBody( + this: *Type, + globalThis: *JSC.JSGlobalObject, + ) JSValue { + var body: *Body.Value = this.getBodyValue(); + + if (body.* == .Used) { + return JSC.WebCore.ReadableStream.used(globalThis); + } + + return body.toReadableStream(globalThis); + } + + pub fn getBodyUsed( + this: *Type, + globalObject: *JSC.JSGlobalObject, + ) JSValue { + return JSValue.jsBoolean( + switch (this.getBodyValue().*) { + .Used => true, + .Locked => |*pending| brk: { + if (pending.action != .none) { + break :brk true; + } + + if (pending.readable.get(globalObject)) |*stream| { + break :brk stream.isDisturbed(globalObject); + } + + break :brk false; + }, + else => false, + }, + ); + } + + fn lifetimeWrap(comptime Fn: anytype, comptime lifetime: JSC.WebCore.Lifetime) fn (*AnyBlob, *JSC.JSGlobalObject) JSC.JSValue { + return struct { + fn wrap(this: *AnyBlob, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + return JSC.toJSHostValue(globalObject, Fn(this, globalObject, lifetime)); + } + }.wrap; + } + + pub fn getJSON( + this: *Type, + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, + ) bun.JSError!JSC.JSValue { + var value: *Body.Value = this.getBodyValue(); + if (value.* == .Used) { + return handleBodyAlreadyUsed(globalObject); + } + + if (value.* == .Locked) { + if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { + return handleBodyAlreadyUsed(globalObject); + } + + value.toBlobIfPossible(); + if (value.* == .Locked) { + return value.Locked.setPromise(globalObject, .{ .getJSON = {} }); + } + } + + var blob = value.useAsAnyBlobAllowNonUTF8String(); + + return JSC.JSPromise.wrap(globalObject, lifetimeWrap(AnyBlob.toJSON, .share), .{ &blob, globalObject }); + } + + fn handleBodyAlreadyUsed(globalObject: *JSC.JSGlobalObject) JSValue { + return globalObject.ERR(.BODY_ALREADY_USED, "Body already used", .{}).reject(); + } + + pub fn getArrayBuffer( + this: *Type, + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, + ) bun.JSError!JSC.JSValue { + var value: 
*Body.Value = this.getBodyValue(); + + if (value.* == .Used) { + return handleBodyAlreadyUsed(globalObject); + } + + if (value.* == .Locked) { + if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { + return handleBodyAlreadyUsed(globalObject); + } + value.toBlobIfPossible(); + + if (value.* == .Locked) { + return value.Locked.setPromise(globalObject, .{ .getArrayBuffer = {} }); + } + } + + // toArrayBuffer in AnyBlob checks for non-UTF8 strings + var blob: AnyBlob = value.useAsAnyBlobAllowNonUTF8String(); + + return JSC.JSPromise.wrap(globalObject, lifetimeWrap(AnyBlob.toArrayBuffer, .transfer), .{ &blob, globalObject }); + } + + pub fn getBytes( + this: *Type, + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, + ) bun.JSError!JSC.JSValue { + var value: *Body.Value = this.getBodyValue(); + + if (value.* == .Used) { + return handleBodyAlreadyUsed(globalObject); + } + + if (value.* == .Locked) { + if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { + return handleBodyAlreadyUsed(globalObject); + } + value.toBlobIfPossible(); + if (value.* == .Locked) { + return value.Locked.setPromise(globalObject, .{ .getBytes = {} }); + } + } + + // toArrayBuffer in AnyBlob checks for non-UTF8 strings + var blob: AnyBlob = value.useAsAnyBlobAllowNonUTF8String(); + return JSC.JSPromise.wrap(globalObject, lifetimeWrap(AnyBlob.toUint8Array, .transfer), .{ &blob, globalObject }); + } + + pub fn getFormData( + this: *Type, + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, + ) bun.JSError!JSC.JSValue { + var value: *Body.Value = this.getBodyValue(); + + if (value.* == .Used) { + return handleBodyAlreadyUsed(globalObject); + } + + if (value.* == .Locked) { + if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { + return handleBodyAlreadyUsed(globalObject); + } + value.toBlobIfPossible(); + } + + var encoder = this.getFormDataEncoding() orelse { + // TODO: catch specific errors from getFormDataEncoding + return globalObject.ERR(.FORMDATA_PARSE_ERROR, "Can't decode form data from body because of incorrect MIME type/boundary", .{}).reject(); + }; + + if (value.* == .Locked) { + return value.Locked.setPromise(globalObject, .{ .getFormData = encoder }); + } + + var blob: AnyBlob = value.useAsAnyBlob(); + defer blob.detach(); + defer encoder.deinit(); + + const js_value = bun.FormData.toJS( + globalObject, + blob.slice(), + encoder.encoding, + ) catch |err| { + return globalObject.ERR( + .FORMDATA_PARSE_ERROR, + "FormData parse error {s}", + .{ + @errorName(err), + }, + ).reject(); + }; + + return JSC.JSPromise.wrapValue( + globalObject, + js_value, + ); + } + + pub fn getBlob( + this: *Type, + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, + ) bun.JSError!JSC.JSValue { + return getBlobWithThisValue(this, globalObject, callframe.this()); + } + + pub fn getBlobWithThisValue( + this: *Type, + globalObject: *JSC.JSGlobalObject, + this_value: JSValue, + ) JSC.JSValue { + var value: *Body.Value = this.getBodyValue(); + + if (value.* == .Used) { + return handleBodyAlreadyUsed(globalObject); + } + + if (value.* == .Locked) { + if (value.Locked.action != .none or + ((this_value != .zero and value.Locked.isDisturbed(Type, globalObject, this_value)) or + (this_value == .zero and value.Locked.readable.isDisturbed(globalObject)))) + { + return handleBodyAlreadyUsed(globalObject); + } + + value.toBlobIfPossible(); + + if (value.* == 
.Locked) { + return value.Locked.setPromise(globalObject, .{ .getBlob = {} }); + } + } + + var blob = Blob.new(value.use()); + blob.allocator = bun.default_allocator; + if (blob.content_type.len == 0) { + if (this.getFetchHeaders()) |fetch_headers| { + if (fetch_headers.fastGet(.ContentType)) |content_type| { + var content_slice = content_type.toSlice(blob.allocator.?); + defer content_slice.deinit(); + var allocated = false; + const mimeType = MimeType.init(content_slice.slice(), blob.allocator.?, &allocated); + blob.content_type = mimeType.value; + blob.content_type_allocated = allocated; + blob.content_type_was_set = true; + if (blob.store != null) { + blob.store.?.mime_type = mimeType; + } + } + } + if (!blob.content_type_was_set and blob.store != null) { + blob.content_type = MimeType.text.value; + blob.content_type_allocated = false; + blob.content_type_was_set = true; + blob.store.?.mime_type = MimeType.text; + } + } + return JSC.JSPromise.resolvedPromiseValue(globalObject, blob.toJS(globalObject)); + } + + pub fn getBlobWithoutCallFrame( + this: *Type, + globalObject: *JSC.JSGlobalObject, + ) JSC.JSValue { + return getBlobWithThisValue(this, globalObject, .zero); + } + }; +} + +pub const ValueBufferer = struct { + const log = bun.Output.scoped(.BodyValueBufferer, false); + + const ArrayBufferSink = bun.webcore.Sink.ArrayBufferSink; + const Callback = *const fn (ctx: *anyopaque, bytes: []const u8, err: ?Body.Value.ValueError, is_async: bool) void; + + ctx: *anyopaque, + onFinishedBuffering: Callback, + + js_sink: ?*ArrayBufferSink.JSSink = null, + byte_stream: ?*JSC.WebCore.ByteStream = null, + // readable stream strong ref to keep byte stream alive + readable_stream_ref: JSC.WebCore.ReadableStream.Strong = .{}, + stream_buffer: bun.MutableString, + allocator: std.mem.Allocator, + global: *JSGlobalObject, + + pub fn deinit(this: *@This()) void { + this.stream_buffer.deinit(); + if (this.byte_stream) |byte_stream| { + byte_stream.unpipeWithoutDeref(); + } + this.readable_stream_ref.deinit(); + + if (this.js_sink) |buffer_stream| { + buffer_stream.detach(); + buffer_stream.sink.destroy(); + this.js_sink = null; + } + } + + pub fn init( + ctx: *anyopaque, + onFinish: Callback, + global: *JSGlobalObject, + allocator: std.mem.Allocator, + ) @This() { + const this: ValueBufferer = .{ + .ctx = ctx, + .onFinishedBuffering = onFinish, + .allocator = allocator, + .global = global, + .stream_buffer = .{ + .allocator = allocator, + .list = .{ + .items = &.{}, + .capacity = 0, + }, + }, + }; + return this; + } + + pub fn run(sink: *@This(), value: *JSC.WebCore.Body.Value) !void { + value.toBlobIfPossible(); + + switch (value.*) { + .Used => { + log("Used", .{}); + return error.StreamAlreadyUsed; + }, + .Empty, .Null => { + log("Empty", .{}); + return sink.onFinishedBuffering(sink.ctx, "", null, false); + }, + + .Error => |err| { + log("Error", .{}); + sink.onFinishedBuffering(sink.ctx, "", err, false); + return; + }, + // .InlineBlob, + .WTFStringImpl, + .InternalBlob, + .Blob, + => { + // toBlobIfPossible checks for WTFString needing a conversion. 
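+ // A Blob backed by a file still needs an asynchronous read (doReadFileInternal below);
+ // values that are already in memory are handed to the callback synchronously.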
+ var input = value.useAsAnyBlobAllowNonUTF8String(); + const is_pending = input.needsToReadFile(); + defer if (!is_pending) input.detach(); + + if (is_pending) { + input.Blob.doReadFileInternal(*@This(), sink, onFinishedLoadingFile, sink.global); + } else { + const bytes = input.slice(); + log("Blob {}", .{bytes.len}); + sink.onFinishedBuffering(sink.ctx, bytes, null, false); + } + return; + }, + .Locked => { + try sink.bufferLockedBodyValue(value); + }, + } + } + + fn onFinishedLoadingFile(sink: *@This(), bytes: Blob.read_file.ReadFileResultType) void { + switch (bytes) { + .err => |err| { + log("onFinishedLoadingFile Error", .{}); + sink.onFinishedBuffering(sink.ctx, "", .{ .SystemError = err }, true); + return; + }, + .result => |data| { + log("onFinishedLoadingFile Data {}", .{data.buf.len}); + sink.onFinishedBuffering(sink.ctx, data.buf, null, true); + if (data.is_temporary) { + bun.default_allocator.free(@constCast(data.buf)); + } + }, + } + } + fn onStreamPipe(sink: *@This(), stream: JSC.WebCore.streams.Result, allocator: std.mem.Allocator) void { + const stream_needs_deinit = stream == .owned or stream == .owned_and_done; + + defer { + if (stream_needs_deinit) { + if (stream == .owned_and_done) { + stream.owned_and_done.listManaged(allocator).deinit(); + } else { + stream.owned.listManaged(allocator).deinit(); + } + } + } + + const chunk = stream.slice(); + log("onStreamPipe chunk {}", .{chunk.len}); + _ = sink.stream_buffer.write(chunk) catch bun.outOfMemory(); + if (stream.isDone()) { + const bytes = sink.stream_buffer.list.items; + log("onStreamPipe done {}", .{bytes.len}); + sink.onFinishedBuffering(sink.ctx, bytes, null, true); + return; + } + } + + pub fn onResolveStream(_: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + var args = callframe.arguments_old(2); + var sink: *@This() = args.ptr[args.len - 1].asPromisePtr(@This()); + sink.handleResolveStream(true); + return JSValue.jsUndefined(); + } + + pub fn onRejectStream(_: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const args = callframe.arguments_old(2); + var sink = args.ptr[args.len - 1].asPromisePtr(@This()); + const err = args.ptr[0]; + sink.handleRejectStream(err, true); + return JSValue.jsUndefined(); + } + + fn handleRejectStream(sink: *@This(), err: JSValue, is_async: bool) void { + if (sink.js_sink) |wrapper| { + wrapper.detach(); + sink.js_sink = null; + wrapper.sink.destroy(); + } + var ref = JSC.Strong.create(err, sink.global); + defer ref.deinit(); + sink.onFinishedBuffering(sink.ctx, "", .{ .JSValue = ref }, is_async); + } + + fn handleResolveStream(sink: *@This(), is_async: bool) void { + if (sink.js_sink) |wrapper| { + const bytes = wrapper.sink.bytes.slice(); + log("handleResolveStream {}", .{bytes.len}); + sink.onFinishedBuffering(sink.ctx, bytes, null, is_async); + } else { + log("handleResolveStream no sink", .{}); + sink.onFinishedBuffering(sink.ctx, "", null, is_async); + } + } + + fn createJSSink(sink: *@This(), stream: JSC.WebCore.ReadableStream) !void { + stream.value.ensureStillAlive(); + var allocator = sink.allocator; + var buffer_stream = try allocator.create(ArrayBufferSink.JSSink); + var globalThis = sink.global; + buffer_stream.* = ArrayBufferSink.JSSink{ + .sink = ArrayBufferSink{ + .bytes = bun.ByteList.init(&.{}), + .allocator = allocator, + .next = null, + }, + }; + var signal = &buffer_stream.sink.signal; + sink.js_sink = buffer_stream; + + signal.* = ArrayBufferSink.JSSink.SinkSignal.init(JSValue.zero); + + // explicitly set 
it to a dead pointer + // we use this memory address to disable signals being sent + signal.clear(); + assert(signal.isDead()); + + const assignment_result: JSValue = ArrayBufferSink.JSSink.assignToStream( + globalThis, + stream.value, + buffer_stream, + @as(**anyopaque, @ptrCast(&signal.ptr)), + ); + + assignment_result.ensureStillAlive(); + + // assert that it was updated + assert(!signal.isDead()); + + if (assignment_result.isError()) { + return error.PipeFailed; + } + + if (!assignment_result.isEmptyOrUndefinedOrNull()) { + assignment_result.ensureStillAlive(); + // it returns a Promise when it goes through ReadableStreamDefaultReader + if (assignment_result.asAnyPromise()) |promise| { + switch (promise.status(globalThis.vm())) { + .Pending => { + assignment_result.then( + globalThis, + sink, + onResolveStream, + onRejectStream, + ); + }, + .Fulfilled => { + defer stream.value.unprotect(); + + sink.handleResolveStream(false); + }, + .Rejected => { + defer stream.value.unprotect(); + + sink.handleRejectStream(promise.result(globalThis.vm()), false); + }, + } + return; + } + } + + return error.PipeFailed; + } + + fn bufferLockedBodyValue(sink: *@This(), value: *JSC.WebCore.Body.Value) !void { + assert(value.* == .Locked); + const locked = &value.Locked; + if (locked.readable.get(sink.global)) |stream| { + // keep the stream alive until we're done with it + sink.readable_stream_ref = locked.readable; + value.* = .{ .Used = {} }; + + if (stream.isLocked(sink.global)) { + return error.StreamAlreadyUsed; + } + + switch (stream.ptr) { + .Invalid => { + return error.InvalidStream; + }, + // toBlobIfPossible should've caught this + .Blob, .File => unreachable, + .JavaScript, .Direct => { + // this is broken right now + // return sink.createJSSink(stream); + return error.UnsupportedStreamType; + }, + .Bytes => |byte_stream| { + assert(byte_stream.pipe.ctx == null); + assert(sink.byte_stream == null); + + const bytes = byte_stream.buffer.items; + // If we've received the complete body by the time this function is called + // we can avoid streaming it and just send it all at once. 
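+ // If the last chunk has not arrived yet, we instead attach a pipe below and keep
+ // accumulating chunks into stream_buffer until the stream reports it is done.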
+ if (byte_stream.has_received_last_chunk) { + log("byte stream has_received_last_chunk {}", .{bytes.len}); + sink.onFinishedBuffering(sink.ctx, bytes, null, false); + // is safe to detach here because we're not going to receive any more data + stream.done(sink.global); + return; + } + + byte_stream.pipe = JSC.WebCore.Pipe.Wrap(@This(), onStreamPipe).init(sink); + sink.byte_stream = byte_stream; + log("byte stream pre-buffered {}", .{bytes.len}); + + _ = sink.stream_buffer.write(bytes) catch bun.outOfMemory(); + return; + }, + } + } + + if (locked.onReceiveValue != null or locked.task != null) { + // someone else is waiting for the stream or waiting for `onStartStreaming` + const readable = value.toReadableStream(sink.global); + readable.ensureStillAlive(); + readable.protect(); + return try sink.bufferLockedBodyValue(value); + } + // is safe to wait it buffer + locked.task = @ptrCast(sink); + locked.onReceiveValue = @This().onReceiveValue; + } + + fn onReceiveValue(ctx: *anyopaque, value: *JSC.WebCore.Body.Value) void { + const sink = bun.cast(*@This(), ctx); + switch (value.*) { + .Error => |err| { + log("onReceiveValue Error", .{}); + sink.onFinishedBuffering(sink.ctx, "", err, true); + return; + }, + else => { + value.toBlobIfPossible(); + var input = value.useAsAnyBlobAllowNonUTF8String(); + const bytes = input.slice(); + log("onReceiveValue {}", .{bytes.len}); + sink.onFinishedBuffering(sink.ctx, bytes, null, true); + }, + } + } + + comptime { + const jsonResolveStream = JSC.toJSHostFn(onResolveStream); + @export(&jsonResolveStream, .{ .name = "Bun__BodyValueBufferer__onResolveStream" }); + const jsonRejectStream = JSC.toJSHostFn(onRejectStream); + @export(&jsonRejectStream, .{ .name = "Bun__BodyValueBufferer__onRejectStream" }); + } +}; + +const assert = bun.assert; + +const std = @import("std"); +const Api = @import("../../api/schema.zig").Api; +const bun = @import("bun"); +const MimeType = bun.http.MimeType; +const ZigURL = @import("../../url.zig").URL; +const HTTPClient = bun.http; +const JSC = bun.JSC; + +const Method = @import("../../http/method.zig").Method; +const FetchHeaders = bun.webcore.FetchHeaders; +const ObjectPool = @import("../../pool.zig").ObjectPool; +const SystemError = JSC.SystemError; +const Output = bun.Output; +const MutableString = bun.MutableString; +const strings = bun.strings; +const string = bun.string; +const default_allocator = bun.default_allocator; +const FeatureFlags = bun.FeatureFlags; +const ArrayBuffer = JSC.ArrayBuffer; + +const Environment = @import("../../env.zig"); +const ZigString = JSC.ZigString; +const IdentityContext = @import("../../identity_context.zig").IdentityContext; +const JSPromise = JSC.JSPromise; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const NullableAllocator = bun.NullableAllocator; + +const VirtualMachine = JSC.VirtualMachine; +const Task = JSC.Task; +const JSPrinter = bun.js_printer; +const picohttp = bun.picohttp; +const StringJoiner = bun.StringJoiner; +const uws = bun.uws; + +const Blob = JSC.WebCore.Blob; +const AnyBlob = Blob.Any; +const InternalBlob = Blob.Internal; +const Response = JSC.WebCore.Response; +const Request = JSC.WebCore.Request; +const streams = JSC.WebCore.streams; diff --git a/src/bun.js/webcore/ByteBlobLoader.zig b/src/bun.js/webcore/ByteBlobLoader.zig new file mode 100644 index 0000000000..90e14899c1 --- /dev/null +++ b/src/bun.js/webcore/ByteBlobLoader.zig @@ -0,0 +1,164 @@ +const ByteBlobLoader = @This(); + +offset: Blob.SizeType = 0, +store: ?*Blob.Store = null, 
+chunk_size: Blob.SizeType = 1024 * 1024 * 2, +remain: Blob.SizeType = 1024 * 1024 * 2, +done: bool = false, +pulled: bool = false, + +pub const tag = webcore.ReadableStream.Tag.Blob; +pub const Source = webcore.ReadableStream.NewSource( + @This(), + "Blob", + onStart, + onPull, + onCancel, + deinit, + null, + drain, + memoryCost, + toBufferedValue, +); + +pub fn parent(this: *@This()) *Source { + return @fieldParentPtr("context", this); +} + +pub fn setup( + this: *ByteBlobLoader, + blob: *const Blob, + user_chunk_size: Blob.SizeType, +) void { + blob.store.?.ref(); + var blobe = blob.*; + blobe.resolveSize(); + this.* = ByteBlobLoader{ + .offset = blobe.offset, + .store = blobe.store.?, + .chunk_size = @min( + if (user_chunk_size > 0) @min(user_chunk_size, blobe.size) else blobe.size, + 1024 * 1024 * 2, + ), + .remain = blobe.size, + .done = false, + }; +} + +pub fn onStart(this: *ByteBlobLoader) streams.Start { + return .{ .chunk_size = this.chunk_size }; +} + +pub fn onPull(this: *ByteBlobLoader, buffer: []u8, array: JSValue) streams.Result { + array.ensureStillAlive(); + defer array.ensureStillAlive(); + this.pulled = true; + const store = this.store orelse return .{ .done = {} }; + if (this.done) { + return .{ .done = {} }; + } + + var temporary = store.sharedView(); + temporary = temporary[@min(this.offset, temporary.len)..]; + + temporary = temporary[0..@min(buffer.len, @min(temporary.len, this.remain))]; + if (temporary.len == 0) { + this.clearStore(); + this.done = true; + return .{ .done = {} }; + } + + const copied = @as(Blob.SizeType, @intCast(temporary.len)); + + this.remain -|= copied; + this.offset +|= copied; + bun.assert(buffer.ptr != temporary.ptr); + @memcpy(buffer[0..temporary.len], temporary); + if (this.remain == 0) { + return .{ .into_array_and_done = .{ .value = array, .len = copied } }; + } + + return .{ .into_array = .{ .value = array, .len = copied } }; +} + +pub fn toAnyBlob(this: *ByteBlobLoader, globalThis: *JSGlobalObject) ?Blob.Any { + if (this.store) |store| { + _ = this.detachStore(); + if (this.offset == 0 and this.remain == store.size()) { + if (store.toAnyBlob()) |blob| { + defer store.deref(); + return blob; + } + } + + var blob = Blob.initWithStore(store, globalThis); + blob.offset = this.offset; + blob.size = this.remain; + this.parent().is_closed = true; + return .{ .Blob = blob }; + } + return null; +} + +pub fn detachStore(this: *ByteBlobLoader) ?*Blob.Store { + if (this.store) |store| { + this.store = null; + this.done = true; + return store; + } + return null; +} + +pub fn onCancel(this: *ByteBlobLoader) void { + this.clearStore(); +} + +pub fn deinit(this: *ByteBlobLoader) void { + this.clearStore(); + this.parent().deinit(); +} + +fn clearStore(this: *ByteBlobLoader) void { + if (this.store) |store| { + this.store = null; + store.deref(); + } +} + +pub fn drain(this: *ByteBlobLoader) bun.ByteList { + const store = this.store orelse return .{}; + var temporary = store.sharedView(); + temporary = temporary[this.offset..]; + temporary = temporary[0..@min(16384, @min(temporary.len, this.remain))]; + + const cloned = bun.ByteList.init(temporary).listManaged(bun.default_allocator).clone() catch bun.outOfMemory(); + this.offset +|= @as(Blob.SizeType, @truncate(cloned.items.len)); + this.remain -|= @as(Blob.SizeType, @truncate(cloned.items.len)); + + return bun.ByteList.fromList(cloned); +} + +pub fn toBufferedValue(this: *ByteBlobLoader, globalThis: *JSGlobalObject, action: streams.BufferAction.Tag) bun.JSError!JSValue { + if 
(this.toAnyBlob(globalThis)) |blob_| { + var blob = blob_; + return blob.toPromise(globalThis, action); + } + + return .zero; +} + +pub fn memoryCost(this: *const ByteBlobLoader) usize { + // ReadableStreamSource covers @sizeOf(FileReader) + if (this.store) |store| { + return store.memoryCost(); + } + return 0; +} + +const bun = @import("bun"); +const jsc = bun.jsc; +const webcore = bun.webcore; +const streams = webcore.streams; +const Blob = webcore.Blob; +const JSGlobalObject = jsc.JSGlobalObject; +const JSValue = jsc.JSValue; diff --git a/src/bun.js/webcore/ByteStream.zig b/src/bun.js/webcore/ByteStream.zig new file mode 100644 index 0000000000..de09b5a6ef --- /dev/null +++ b/src/bun.js/webcore/ByteStream.zig @@ -0,0 +1,459 @@ +const ByteStream = @This(); + +buffer: std.ArrayList(u8) = .{ + .allocator = bun.default_allocator, + .items = &.{}, + .capacity = 0, +}, +has_received_last_chunk: bool = false, +pending: streams.Result.Pending = .{ .result = .{ .done = {} } }, +done: bool = false, +pending_buffer: []u8 = &.{}, +pending_value: jsc.Strong = .empty, +offset: usize = 0, +highWaterMark: Blob.SizeType = 0, +pipe: Pipe = .{}, +size_hint: Blob.SizeType = 0, +buffer_action: ?BufferAction = null, + +pub const Source = webcore.ReadableStream.NewSource( + @This(), + "Bytes", + onStart, + onPull, + onCancel, + deinit, + null, + drain, + memoryCost, + toBufferedValue, +); + +const log = Output.scoped(.ByteStream, false); + +pub const tag = webcore.ReadableStream.Tag.Bytes; + +pub fn setup(this: *ByteStream) void { + this.* = .{}; +} + +pub fn onStart(this: *@This()) streams.Start { + if (this.has_received_last_chunk and this.buffer.items.len == 0) { + return .{ .empty = {} }; + } + + if (this.has_received_last_chunk) { + return .{ .owned_and_done = bun.ByteList.fromList(this.buffer.moveToUnmanaged()) }; + } + + if (this.highWaterMark == 0) { + return .{ .ready = {} }; + } + + // For HTTP, the maximum streaming response body size will be 512 KB. + // #define LIBUS_RECV_BUFFER_LENGTH 524288 + // For HTTPS, the size is probably quite a bit lower like 64 KB due to TLS transmission. + // We add 1 extra page size so that if there's a little bit of excess buffered data, we avoid extra allocations. + const page_size: Blob.SizeType = @intCast(std.heap.pageSize()); + return .{ .chunk_size = @min(512 * 1024 + page_size, @max(this.highWaterMark, page_size)) }; +} + +pub fn value(this: *@This()) JSValue { + const result = this.pending_value.get() orelse { + return .zero; + }; + this.pending_value.clearWithoutDeallocation(); + return result; +} + +pub fn isCancelled(this: *const @This()) bool { + return this.parent().cancelled; +} + +pub fn unpipeWithoutDeref(this: *@This()) void { + this.pipe.ctx = null; + this.pipe.onPipe = null; +} + +pub fn onData( + this: *@This(), + stream: streams.Result, + allocator: std.mem.Allocator, +) void { + jsc.markBinding(@src()); + if (this.done) { + if (stream.isDone() and (stream == .owned or stream == .owned_and_done)) { + if (stream == .owned) allocator.free(stream.owned.slice()); + if (stream == .owned_and_done) allocator.free(stream.owned_and_done.slice()); + } + this.has_received_last_chunk = stream.isDone(); + + log("ByteStream.onData already done... 
do nothing", .{}); + + return; + } + + bun.assert(!this.has_received_last_chunk or stream == .err); + this.has_received_last_chunk = stream.isDone(); + + if (this.pipe.ctx) |ctx| { + this.pipe.onPipe.?(ctx, stream, allocator); + return; + } + + const chunk = stream.slice(); + + if (this.buffer_action) |*action| { + if (stream == .err) { + defer { + this.buffer.clearAndFree(); + this.pending.result.deinit(); + this.pending.result = .{ .done = {} }; + this.buffer_action = null; + } + + log("ByteStream.onData err action.reject()", .{}); + + action.reject(this.parent().globalThis, stream.err); + return; + } + + if (this.has_received_last_chunk) { + defer { + this.buffer_action = null; + } + + if (this.buffer.capacity == 0 and stream == .done) { + log("ByteStream.onData done and action.fulfill()", .{}); + + var blob = this.toAnyBlob().?; + action.fulfill(this.parent().globalThis, &blob); + return; + } + if (this.buffer.capacity == 0 and stream == .owned_and_done) { + log("ByteStream.onData owned_and_done and action.fulfill()", .{}); + + this.buffer = std.ArrayList(u8).fromOwnedSlice(bun.default_allocator, @constCast(chunk)); + var blob = this.toAnyBlob().?; + action.fulfill(this.parent().globalThis, &blob); + return; + } + defer { + if (stream == .owned_and_done or stream == .owned) { + allocator.free(stream.slice()); + } + } + log("ByteStream.onData appendSlice and action.fulfill()", .{}); + + this.buffer.appendSlice(chunk) catch bun.outOfMemory(); + var blob = this.toAnyBlob().?; + action.fulfill(this.parent().globalThis, &blob); + + return; + } else { + this.buffer.appendSlice(chunk) catch bun.outOfMemory(); + + if (stream == .owned_and_done or stream == .owned) { + allocator.free(stream.slice()); + } + } + + return; + } + + if (this.pending.state == .pending) { + bun.assert(this.buffer.items.len == 0); + const to_copy = this.pending_buffer[0..@min(chunk.len, this.pending_buffer.len)]; + const pending_buffer_len = this.pending_buffer.len; + bun.assert(to_copy.ptr != chunk.ptr); + @memcpy(to_copy, chunk[0..to_copy.len]); + this.pending_buffer = &.{}; + + const is_really_done = this.has_received_last_chunk and to_copy.len <= pending_buffer_len; + + if (is_really_done) { + this.done = true; + + if (to_copy.len == 0) { + if (stream == .err) { + this.pending.result = .{ + .err = stream.err, + }; + } else { + this.pending.result = .{ + .done = {}, + }; + } + } else { + this.pending.result = .{ + .into_array_and_done = .{ + .value = this.value(), + .len = @as(Blob.SizeType, @truncate(to_copy.len)), + }, + }; + } + } else { + this.pending.result = .{ + .into_array = .{ + .value = this.value(), + .len = @as(Blob.SizeType, @truncate(to_copy.len)), + }, + }; + } + + const remaining = chunk[to_copy.len..]; + if (remaining.len > 0 and chunk.len > 0) + this.append(stream, to_copy.len, chunk, allocator) catch @panic("Out of memory while copying request body"); + + log("ByteStream.onData pending.run()", .{}); + + this.pending.run(); + + return; + } + + log("ByteStream.onData no action just append", .{}); + + this.append(stream, 0, chunk, allocator) catch @panic("Out of memory while copying request body"); +} + +pub fn append( + this: *@This(), + stream: streams.Result, + offset: usize, + base_address: []const u8, + allocator: std.mem.Allocator, +) !void { + const chunk = stream.slice()[offset..]; + + if (this.buffer.capacity == 0) { + switch (stream) { + .owned => |owned| { + this.buffer = owned.listManaged(allocator); + this.offset += offset; + }, + .owned_and_done => |owned| { + this.buffer = 
owned.listManaged(allocator); + this.offset += offset; + }, + .temporary_and_done, .temporary => { + this.buffer = try std.ArrayList(u8).initCapacity(bun.default_allocator, chunk.len); + this.buffer.appendSliceAssumeCapacity(chunk); + }, + .err => { + this.pending.result = .{ .err = stream.err }; + }, + .done => {}, + else => unreachable, + } + return; + } + + switch (stream) { + .temporary_and_done, .temporary => { + try this.buffer.appendSlice(chunk); + }, + .owned_and_done, .owned => { + try this.buffer.appendSlice(chunk); + allocator.free(@constCast(base_address)); + }, + .err => { + if (this.buffer_action != null) { + @panic("Expected buffer action to be null"); + } + + this.pending.result = .{ .err = stream.err }; + }, + .done => {}, + // We don't support the rest of these yet + else => unreachable, + } + + return; +} + +pub fn setValue(this: *@This(), view: jsc.JSValue) void { + jsc.markBinding(@src()); + this.pending_value.set(this.parent().globalThis, view); +} + +pub fn parent(this: *@This()) *Source { + return @fieldParentPtr("context", this); +} + +pub fn onPull(this: *@This(), buffer: []u8, view: jsc.JSValue) streams.Result { + jsc.markBinding(@src()); + bun.assert(buffer.len > 0); + bun.debugAssert(this.buffer_action == null); + + if (this.buffer.items.len > 0) { + bun.assert(this.value() == .zero); + const to_write = @min( + this.buffer.items.len - this.offset, + buffer.len, + ); + const remaining_in_buffer = this.buffer.items[this.offset..][0..to_write]; + + @memcpy(buffer[0..to_write], this.buffer.items[this.offset..][0..to_write]); + + if (this.offset + to_write == this.buffer.items.len) { + this.offset = 0; + this.buffer.items.len = 0; + } else { + this.offset += to_write; + } + + if (this.has_received_last_chunk and remaining_in_buffer.len == 0) { + this.buffer.clearAndFree(); + this.done = true; + + return .{ + .into_array_and_done = .{ + .value = view, + .len = @as(Blob.SizeType, @truncate(to_write)), + }, + }; + } + + return .{ + .into_array = .{ + .value = view, + .len = @as(Blob.SizeType, @truncate(to_write)), + }, + }; + } + + if (this.has_received_last_chunk) { + return .{ + .done = {}, + }; + } + + this.pending_buffer = buffer; + this.setValue(view); + + return .{ + .pending = &this.pending, + }; +} + +pub fn onCancel(this: *@This()) void { + jsc.markBinding(@src()); + const view = this.value(); + if (this.buffer.capacity > 0) this.buffer.clearAndFree(); + this.done = true; + this.pending_value.deinit(); + + if (view != .zero) { + this.pending_buffer = &.{}; + this.pending.result.deinit(); + this.pending.result = .{ .done = {} }; + this.pending.run(); + } + + if (this.buffer_action) |*action| { + const global = this.parent().globalThis; + action.reject(global, .{ .AbortReason = .UserAbort }); + this.buffer_action = null; + } +} + +pub fn memoryCost(this: *const @This()) usize { + // ReadableStreamSource covers @sizeOf(ByteStream) + return this.buffer.capacity; +} + +pub fn deinit(this: *@This()) void { + jsc.markBinding(@src()); + if (this.buffer.capacity > 0) this.buffer.clearAndFree(); + + this.pending_value.deinit(); + if (!this.done) { + this.done = true; + + this.pending_buffer = &.{}; + this.pending.result.deinit(); + this.pending.result = .{ .done = {} }; + this.pending.run(); + } + if (this.buffer_action) |*action| { + action.deinit(); + } + this.parent().deinit(); +} + +pub fn drain(this: *@This()) bun.ByteList { + if (this.buffer.items.len > 0) { + const out = bun.ByteList.fromList(this.buffer); + this.buffer = .{ + .allocator = bun.default_allocator, 
+ .items = &.{}, + .capacity = 0, + }; + + return out; + } + + return .{}; +} + +pub fn toAnyBlob(this: *@This()) ?Blob.Any { + if (this.has_received_last_chunk) { + const buffer = this.buffer; + this.buffer = .{ + .allocator = bun.default_allocator, + .items = &.{}, + .capacity = 0, + }; + this.done = true; + this.pending.result.deinit(); + this.pending.result = .{ .done = {} }; + this.parent().is_closed = true; + return .{ .InternalBlob = .{ + .bytes = buffer, + .was_string = false, + } }; + } + + return null; +} + +pub fn toBufferedValue(this: *@This(), globalThis: *jsc.JSGlobalObject, action: streams.BufferAction.Tag) bun.JSError!jsc.JSValue { + if (this.buffer_action != null) { + return globalThis.throw("Cannot buffer value twice", .{}); + } + + if (this.pending.result == .err) { + const err, _ = this.pending.result.err.toJSWeak(globalThis); + this.pending.result.deinit(); + this.done = true; + this.buffer.clearAndFree(); + return jsc.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); + } + + if (this.toAnyBlob()) |blob_| { + var blob = blob_; + return blob.toPromise(globalThis, action); + } + + this.buffer_action = switch (action) { + .blob => .{ .blob = .init(globalThis) }, + .bytes => .{ .bytes = .init(globalThis) }, + .arrayBuffer => .{ .arrayBuffer = .init(globalThis) }, + .json => .{ .json = .init(globalThis) }, + .text => .{ .text = .init(globalThis) }, + }; + + return this.buffer_action.?.value(); +} + +const std = @import("std"); +const bun = @import("bun"); +const Output = bun.Output; +const webcore = bun.webcore; +const streams = webcore.streams; +const jsc = bun.jsc; +const Blob = webcore.Blob; +const Pipe = webcore.Pipe; +const BufferAction = streams.BufferAction; +const JSValue = jsc.JSValue; diff --git a/src/bun.js/webcore/Crypto.zig b/src/bun.js/webcore/Crypto.zig new file mode 100644 index 0000000000..6c1ecf311e --- /dev/null +++ b/src/bun.js/webcore/Crypto.zig @@ -0,0 +1,198 @@ +const Crypto = @This(); + +pub const js = JSC.Codegen.JSCrypto; +pub const toJS = js.toJS; +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; + +garbage: i32 = 0, + +comptime { + _ = CryptoObject__create; +} + +const BoringSSL = bun.BoringSSL.c; + +fn throwInvalidParameter(globalThis: *JSC.JSGlobalObject) bun.JSError { + return globalThis.ERR(.CRYPTO_SCRYPT_INVALID_PARAMETER, "Invalid scrypt parameters", .{}).throw(); +} + +fn throwInvalidParams(globalThis: *JSC.JSGlobalObject, comptime error_type: @Type(.enum_literal), comptime message: [:0]const u8, fmt: anytype) bun.JSError { + if (error_type != .RangeError) @compileError("Error type not added!"); + BoringSSL.ERR_clear_error(); + return globalThis.ERR(.CRYPTO_INVALID_SCRYPT_PARAMS, message, fmt).throw(); +} + +pub fn timingSafeEqual(_: *@This(), global: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + return JSC.Node.crypto.timingSafeEqual(global, callframe); +} + +pub fn timingSafeEqualWithoutTypeChecks( + _: *@This(), + globalThis: *JSC.JSGlobalObject, + array_a: *JSC.JSUint8Array, + array_b: *JSC.JSUint8Array, +) JSC.JSValue { + const a = array_a.slice(); + const b = array_b.slice(); + + const len = a.len; + if (b.len != len) { + return globalThis.ERR(.CRYPTO_TIMING_SAFE_EQUAL_LENGTH, "Input buffers must have the same byte length", .{}).throw(); + } + + return JSC.jsBoolean(bun.BoringSSL.c.CRYPTO_memcmp(a.ptr, b.ptr, len) == 0); +} + +pub fn getRandomValues( + _: *@This(), + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) 
bun.JSError!JSC.JSValue { + const arguments = callframe.arguments_old(1).slice(); + if (arguments.len == 0) { + return globalThis.throwInvalidArguments("Expected typed array but got nothing", .{}); + } + + var array_buffer = arguments[0].asArrayBuffer(globalThis) orelse { + return globalThis.throwInvalidArguments("Expected typed array but got {s}", .{@tagName(arguments[0].jsType())}); + }; + const slice = array_buffer.byteSlice(); + + randomData(globalThis, slice.ptr, slice.len); + + return arguments[0]; +} + +pub fn getRandomValuesWithoutTypeChecks( + _: *@This(), + globalThis: *JSC.JSGlobalObject, + array: *JSC.JSUint8Array, +) JSC.JSValue { + const slice = array.slice(); + randomData(globalThis, slice.ptr, slice.len); + return @as(JSC.JSValue, @enumFromInt(@as(i64, @bitCast(@intFromPtr(array))))); +} + +fn randomData( + globalThis: *JSC.JSGlobalObject, + ptr: [*]u8, + len: usize, +) void { + const slice = ptr[0..len]; + + switch (slice.len) { + 0 => {}, + // 512 bytes or less we reuse from the same cache as UUID generation. + 1...JSC.RareData.EntropyCache.size / 8 => { + bun.copy(u8, slice, globalThis.bunVM().rareData().entropySlice(slice.len)); + }, + else => { + bun.csprng(slice); + }, + } +} + +pub fn randomUUID( + _: *@This(), + globalThis: *JSC.JSGlobalObject, + _: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + var str, var bytes = bun.String.createUninitialized(.latin1, 36); + + const uuid = globalThis.bunVM().rareData().nextUUID(); + + uuid.print(bytes[0..36]); + return str.transferToJS(globalThis); +} + +comptime { + const Bun__randomUUIDv7 = JSC.toJSHostFn(Bun__randomUUIDv7_); + @export(&Bun__randomUUIDv7, .{ .name = "Bun__randomUUIDv7" }); +} +pub fn Bun__randomUUIDv7_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const arguments = callframe.argumentsUndef(2).slice(); + + var encoding_value: JSC.JSValue = .undefined; + + const encoding: JSC.Node.Encoding = brk: { + if (arguments.len > 0) { + if (arguments[0] != .undefined) { + if (arguments[0].isString()) { + encoding_value = arguments[0]; + break :brk try JSC.Node.Encoding.fromJS(encoding_value, globalThis) orelse { + return globalThis.ERR(.UNKNOWN_ENCODING, "Encoding must be one of base64, base64url, hex, or buffer", .{}).throw(); + }; + } + } + } + + break :brk JSC.Node.Encoding.hex; + }; + + const timestamp: u64 = brk: { + const timestamp_value: JSC.JSValue = if (encoding_value != .undefined and arguments.len > 1) + arguments[1] + else if (arguments.len == 1 and encoding_value == .undefined) + arguments[0] + else + .undefined; + + if (timestamp_value != .undefined) { + if (timestamp_value.isDate()) { + const date = timestamp_value.getUnixTimestamp(); + break :brk @intFromFloat(@max(0, date)); + } + break :brk @intCast(try globalThis.validateIntegerRange(timestamp_value, i64, 0, .{ .min = 0, .field_name = "timestamp" })); + } + + break :brk @intCast(@max(0, std.time.milliTimestamp())); + }; + + const entropy = globalThis.bunVM().rareData().entropySlice(8); + + const uuid = UUID7.init(timestamp, &entropy[0..8].*); + + if (encoding == .hex) { + var str, var bytes = bun.String.createUninitialized(.latin1, 36); + uuid.print(bytes[0..36]); + return str.transferToJS(globalThis); + } + + return encoding.encodeWithMaxSize(globalThis, 32, &uuid.bytes); +} + +pub fn randomUUIDWithoutTypeChecks( + _: *Crypto, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + const str, var bytes = bun.String.createUninitialized(.latin1, 36); + defer str.deref(); + + // randomUUID must have been called 
already many times before this kicks + // in so we can skip the rare_data pointer check. + const uuid = globalThis.bunVM().rare_data.?.nextUUID(); + + uuid.print(bytes[0..36]); + return str.toJS(globalThis); +} + +pub fn constructor(globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) bun.JSError!*Crypto { + return JSC.Error.ILLEGAL_CONSTRUCTOR.throw(globalThis, "Crypto is not constructable", .{}); +} + +pub export fn CryptoObject__create(globalThis: *JSC.JSGlobalObject) JSC.JSValue { + JSC.markBinding(@src()); + + var ptr = bun.default_allocator.create(Crypto) catch { + return globalThis.throwOutOfMemoryValue(); + }; + + return ptr.toJS(globalThis); +} + +const UUID7 = @import("../uuid.zig").UUID7; + +const std = @import("std"); +const bun = @import("bun"); +const JSC = bun.jsc; diff --git a/src/bun.js/webcore/FileReader.zig b/src/bun.js/webcore/FileReader.zig new file mode 100644 index 0000000000..0657ed7688 --- /dev/null +++ b/src/bun.js/webcore/FileReader.zig @@ -0,0 +1,667 @@ +const FileReader = @This(); +const log = Output.scoped(.FileReader, false); + +reader: IOReader = IOReader.init(FileReader), +done: bool = false, +pending: streams.Result.Pending = .{}, +pending_value: JSC.Strong = .empty, +pending_view: []u8 = &.{}, +fd: bun.FileDescriptor = bun.invalid_fd, +start_offset: ?usize = null, +max_size: ?usize = null, +total_readed: usize = 0, +started: bool = false, +waiting_for_onReaderDone: bool = false, +event_loop: JSC.EventLoopHandle, +lazy: Lazy = .{ .none = {} }, +buffered: std.ArrayListUnmanaged(u8) = .{}, +read_inside_on_pull: ReadDuringJSOnPullResult = .{ .none = {} }, +highwater_mark: usize = 16384, + +pub const IOReader = bun.io.BufferedReader; +pub const Poll = IOReader; +pub const tag = ReadableStream.Tag.File; + +const ReadDuringJSOnPullResult = union(enum) { + none: void, + js: []u8, + amount_read: usize, + temporary: []const u8, + use_buffered: usize, +}; + +pub const Lazy = union(enum) { + none: void, + blob: *Blob.Store, + + const OpenedFileBlob = struct { + fd: bun.FileDescriptor, + pollable: bool = false, + nonblocking: bool = true, + file_type: bun.io.FileType = .file, + }; + + pub extern "c" fn open_as_nonblocking_tty(i32, i32) i32; + pub fn openFileBlob(file: *Blob.Store.File) JSC.Maybe(OpenedFileBlob) { + var this = OpenedFileBlob{ .fd = bun.invalid_fd }; + var file_buf: bun.PathBuffer = undefined; + var is_nonblocking = false; + + const fd: bun.FD = if (file.pathlike == .fd) + if (file.pathlike.fd.stdioTag() != null) brk: { + if (comptime Environment.isPosix) { + const rc = open_as_nonblocking_tty(file.pathlike.fd.native(), bun.O.RDONLY); + if (rc > -1) { + is_nonblocking = true; + file.is_atty = true; + break :brk .fromNative(rc); + } + } + break :brk file.pathlike.fd; + } else brk: { + const duped = bun.sys.dupWithFlags(file.pathlike.fd, 0); + + if (duped != .result) { + return .{ .err = duped.err.withFd(file.pathlike.fd) }; + } + + const fd: bun.FD = duped.result; + if (comptime Environment.isPosix) { + if (fd.stdioTag() == null) { + is_nonblocking = switch (fd.getFcntlFlags()) { + .result => |flags| (flags & bun.O.NONBLOCK) != 0, + .err => false, + }; + } + } + + break :brk switch (fd.makeLibUVOwnedForSyscall(.dup, .close_on_fail)) { + .result => |owned_fd| owned_fd, + .err => |err| { + return .{ .err = err }; + }, + }; + } + else switch (bun.sys.open(file.pathlike.path.sliceZ(&file_buf), bun.O.RDONLY | bun.O.NONBLOCK | bun.O.CLOEXEC, 0)) { + .result => |fd| fd, + .err => |err| { + return .{ .err = err.withPath(file.pathlike.path.slice()) }; + }, + }; + 
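+        // Descriptive note (added): `fd` now refers to either a stdio descriptor
+        // reopened as a non-blocking TTY, a dup()'d caller-provided descriptor,
+        // or a file freshly opened with O_RDONLY | O_NONBLOCK | O_CLOEXEC.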
+ if (comptime Environment.isPosix) { + if ((file.is_atty orelse false) or + (fd.stdioTag() != null and std.posix.isatty(fd.cast())) or + (file.pathlike == .fd and + file.pathlike.fd.stdioTag() != null and + std.posix.isatty(file.pathlike.fd.cast()))) + { + // var termios = std.mem.zeroes(std.posix.termios); + // _ = std.c.tcgetattr(fd.cast(), &termios); + // bun.C.cfmakeraw(&termios); + // _ = std.c.tcsetattr(fd.cast(), std.posix.TCSA.NOW, &termios); + file.is_atty = true; + } + + const stat: bun.Stat = switch (bun.sys.fstat(fd)) { + .result => |result| result, + .err => |err| { + fd.close(); + return .{ .err = err }; + }, + }; + + if (bun.S.ISDIR(stat.mode)) { + bun.Async.Closer.close(fd, {}); + return .{ .err = .fromCode(.ISDIR, .fstat) }; + } + + this.pollable = bun.sys.isPollable(stat.mode) or is_nonblocking or (file.is_atty orelse false); + this.file_type = if (bun.S.ISFIFO(stat.mode)) + .pipe + else if (bun.S.ISSOCK(stat.mode)) + .socket + else + .file; + + // pretend it's a non-blocking pipe if it's a TTY + if (is_nonblocking and this.file_type != .socket) { + this.file_type = .nonblocking_pipe; + } + + this.nonblocking = is_nonblocking or (this.pollable and !(file.is_atty orelse false)); + + if (this.nonblocking and this.file_type == .pipe) { + this.file_type = .nonblocking_pipe; + } + } + + this.fd = fd; + + return .{ .result = this }; + } +}; + +pub fn eventLoop(this: *const FileReader) JSC.EventLoopHandle { + return this.event_loop; +} + +pub fn loop(this: *const FileReader) *bun.Async.Loop { + return this.eventLoop().loop(); +} + +pub fn setup( + this: *FileReader, + fd: bun.FileDescriptor, +) void { + this.* = FileReader{ + .reader = .{}, + .done = false, + .fd = fd, + }; + + this.event_loop = this.parent().globalThis.bunVM().eventLoop(); +} + +pub fn onStart(this: *FileReader) streams.Start { + this.reader.setParent(this); + const was_lazy = this.lazy != .none; + var pollable = false; + var file_type: bun.io.FileType = .file; + if (this.lazy == .blob) { + switch (this.lazy.blob.data) { + .s3, .bytes => @panic("Invalid state in FileReader: expected file "), + .file => |*file| { + defer { + this.lazy.blob.deref(); + this.lazy = .none; + } + switch (Lazy.openFileBlob(file)) { + .err => |err| { + this.fd = bun.invalid_fd; + return .{ .err = err }; + }, + .result => |opened| { + bun.assert(opened.fd.isValid()); + this.fd = opened.fd; + pollable = opened.pollable; + file_type = opened.file_type; + this.reader.flags.nonblocking = opened.nonblocking; + this.reader.flags.pollable = pollable; + }, + } + }, + } + } + + { + const reader_fd = this.reader.getFd(); + if (reader_fd != bun.invalid_fd and this.fd == bun.invalid_fd) { + this.fd = reader_fd; + } + } + + this.event_loop = JSC.EventLoopHandle.init(this.parent().globalThis.bunVM().eventLoop()); + + if (was_lazy) { + _ = this.parent().incrementCount(); + this.waiting_for_onReaderDone = true; + if (this.start_offset) |offset| { + switch (this.reader.startFileOffset(this.fd, pollable, offset)) { + .result => {}, + .err => |e| { + return .{ .err = e }; + }, + } + } else { + switch (this.reader.start(this.fd, pollable)) { + .result => {}, + .err => |e| { + return .{ .err = e }; + }, + } + } + } else if (comptime Environment.isPosix) { + if (this.reader.flags.pollable and !this.reader.isDone()) { + this.waiting_for_onReaderDone = true; + _ = this.parent().incrementCount(); + } + } + + if (comptime Environment.isPosix) { + if (file_type == .socket) { + this.reader.flags.socket = true; + } + + if (this.reader.handle.getPoll()) |poll| { + 
if (file_type == .socket or this.reader.flags.socket) { + poll.flags.insert(.socket); + } else { + // if it's a TTY, we report it as a fifo + // we want the behavior to be as though it were a blocking pipe. + poll.flags.insert(.fifo); + } + + if (this.reader.flags.nonblocking) { + poll.flags.insert(.nonblocking); + } + } + } + + this.started = true; + + if (this.reader.isDone()) { + this.consumeReaderBuffer(); + if (this.buffered.items.len > 0) { + const buffered = this.buffered; + this.buffered = .{}; + return .{ .owned_and_done = bun.ByteList.init(buffered.items) }; + } + } else if (comptime Environment.isPosix) { + if (!was_lazy and this.reader.flags.pollable) { + this.reader.read(); + } + } + + return .{ .ready = {} }; +} + +pub fn parent(this: *@This()) *Source { + return @fieldParentPtr("context", this); +} + +pub fn onCancel(this: *FileReader) void { + if (this.done) return; + this.done = true; + this.reader.updateRef(false); + if (!this.reader.isDone()) + this.reader.close(); +} + +pub fn deinit(this: *FileReader) void { + this.buffered.deinit(bun.default_allocator); + this.reader.updateRef(false); + this.reader.deinit(); + this.pending_value.deinit(); + + if (this.lazy != .none) { + this.lazy.blob.deref(); + this.lazy = .none; + } + + this.parent().deinit(); +} + +pub fn onReadChunk(this: *@This(), init_buf: []const u8, state: bun.io.ReadState) bool { + var buf = init_buf; + log("onReadChunk() = {d} ({s}) - read_inside_on_pull: {s}", .{ buf.len, @tagName(state), @tagName(this.read_inside_on_pull) }); + + if (this.done) { + this.reader.close(); + return false; + } + var close = false; + defer if (close) this.reader.close(); + var hasMore = state != .eof; + + if (buf.len > 0) { + if (this.max_size) |max_size| { + if (this.total_readed >= max_size) return false; + const len = @min(max_size - this.total_readed, buf.len); + if (buf.len > len) { + buf = buf[0..len]; + } + this.total_readed += len; + + if (buf.len == 0) { + close = true; + hasMore = false; + } + } + } + + if (this.read_inside_on_pull != .none) { + switch (this.read_inside_on_pull) { + .js => |in_progress| { + if (in_progress.len >= buf.len and !hasMore) { + @memcpy(in_progress[0..buf.len], buf); + this.read_inside_on_pull = .{ .js = in_progress[buf.len..] 
}; + } else if (in_progress.len > 0 and !hasMore) { + this.read_inside_on_pull = .{ .temporary = buf }; + } else if (hasMore and !bun.isSliceInBuffer(buf, this.buffered.allocatedSlice())) { + this.buffered.appendSlice(bun.default_allocator, buf) catch bun.outOfMemory(); + this.read_inside_on_pull = .{ .use_buffered = buf.len }; + } + }, + .use_buffered => |original| { + this.buffered.appendSlice(bun.default_allocator, buf) catch bun.outOfMemory(); + this.read_inside_on_pull = .{ .use_buffered = buf.len + original }; + }, + .none => unreachable, + else => @panic("Invalid state"), + } + } else if (this.pending.state == .pending) { + if (buf.len == 0) { + { + if (this.buffered.items.len == 0) { + if (this.buffered.capacity > 0) { + this.buffered.clearAndFree(bun.default_allocator); + } + + if (this.reader.buffer().items.len != 0) { + this.buffered = this.reader.buffer().moveToUnmanaged(); + } + } + + var buffer = &this.buffered; + defer buffer.clearAndFree(bun.default_allocator); + if (buffer.items.len > 0) { + if (this.pending_view.len >= buffer.items.len) { + @memcpy(this.pending_view[0..buffer.items.len], buffer.items); + this.pending.result = .{ .into_array_and_done = .{ .value = this.pending_value.get() orelse .zero, .len = @truncate(buffer.items.len) } }; + } else { + this.pending.result = .{ .owned_and_done = bun.ByteList.fromList(buffer.*) }; + buffer.* = .{}; + } + } else { + this.pending.result = .{ .done = {} }; + } + } + this.pending_value.clearWithoutDeallocation(); + this.pending_view = &.{}; + this.pending.run(); + return false; + } + + const was_done = this.reader.isDone(); + + if (this.pending_view.len >= buf.len) { + @memcpy(this.pending_view[0..buf.len], buf); + this.reader.buffer().clearRetainingCapacity(); + this.buffered.clearRetainingCapacity(); + + if (was_done) { + this.pending.result = .{ + .into_array_and_done = .{ + .value = this.pending_value.get() orelse .zero, + .len = @truncate(buf.len), + }, + }; + } else { + this.pending.result = .{ + .into_array = .{ + .value = this.pending_value.get() orelse .zero, + .len = @truncate(buf.len), + }, + }; + } + + this.pending_value.clearWithoutDeallocation(); + this.pending_view = &.{}; + this.pending.run(); + return !was_done; + } + + if (!bun.isSliceInBuffer(buf, this.buffered.allocatedSlice())) { + if (this.reader.isDone()) { + if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { + this.reader.buffer().* = std.ArrayList(u8).init(bun.default_allocator); + } + this.pending.result = .{ + .temporary_and_done = bun.ByteList.init(buf), + }; + } else { + this.pending.result = .{ + .temporary = bun.ByteList.init(buf), + }; + + if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { + this.reader.buffer().clearRetainingCapacity(); + } + } + + this.pending_value.clearWithoutDeallocation(); + this.pending_view = &.{}; + this.pending.run(); + return !was_done; + } + + if (this.reader.isDone()) { + this.pending.result = .{ + .owned_and_done = bun.ByteList.init(buf), + }; + } else { + this.pending.result = .{ + .owned = bun.ByteList.init(buf), + }; + } + this.buffered = .{}; + this.pending_value.clearWithoutDeallocation(); + this.pending_view = &.{}; + this.pending.run(); + return !was_done; + } else if (!bun.isSliceInBuffer(buf, this.buffered.allocatedSlice())) { + this.buffered.appendSlice(bun.default_allocator, buf) catch bun.outOfMemory(); + if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { + this.reader.buffer().clearRetainingCapacity(); + } + } + + // For pipes, we have to keep 
pulling or the other process will block. + return this.read_inside_on_pull != .temporary and !(this.buffered.items.len + this.reader.buffer().items.len >= this.highwater_mark and !this.reader.flags.pollable); +} + +fn isPulling(this: *const FileReader) bool { + return this.read_inside_on_pull != .none; +} + +pub fn onPull(this: *FileReader, buffer: []u8, array: JSC.JSValue) streams.Result { + array.ensureStillAlive(); + defer array.ensureStillAlive(); + const drained = this.drain(); + + if (drained.len > 0) { + log("onPull({d}) = {d}", .{ buffer.len, drained.len }); + + this.pending_value.clearWithoutDeallocation(); + this.pending_view = &.{}; + + if (buffer.len >= @as(usize, drained.len)) { + @memcpy(buffer[0..drained.len], drained.slice()); + this.buffered.clearAndFree(bun.default_allocator); + + if (this.reader.isDone()) { + return .{ .into_array_and_done = .{ .value = array, .len = drained.len } }; + } else { + return .{ .into_array = .{ .value = array, .len = drained.len } }; + } + } + + if (this.reader.isDone()) { + return .{ .owned_and_done = drained }; + } else { + return .{ .owned = drained }; + } + } + + if (this.reader.isDone()) { + return .{ .done = {} }; + } + + if (!this.reader.hasPendingRead()) { + this.read_inside_on_pull = .{ .js = buffer }; + this.reader.read(); + + defer this.read_inside_on_pull = .{ .none = {} }; + switch (this.read_inside_on_pull) { + .js => |remaining_buf| { + const amount_read = buffer.len - remaining_buf.len; + + log("onPull({d}) = {d}", .{ buffer.len, amount_read }); + + if (amount_read > 0) { + if (this.reader.isDone()) { + return .{ .into_array_and_done = .{ .value = array, .len = @truncate(amount_read) } }; + } + + return .{ .into_array = .{ .value = array, .len = @truncate(amount_read) } }; + } + + if (this.reader.isDone()) { + return .{ .done = {} }; + } + }, + .temporary => |buf| { + log("onPull({d}) = {d}", .{ buffer.len, buf.len }); + if (this.reader.isDone()) { + return .{ .temporary_and_done = bun.ByteList.init(buf) }; + } + + return .{ .temporary = bun.ByteList.init(buf) }; + }, + .use_buffered => { + const buffered = this.buffered; + this.buffered = .{}; + log("onPull({d}) = {d}", .{ buffer.len, buffered.items.len }); + if (this.reader.isDone()) { + return .{ .owned_and_done = bun.ByteList.init(buffered.items) }; + } + + return .{ .owned = bun.ByteList.init(buffered.items) }; + }, + else => {}, + } + + if (this.reader.isDone()) { + log("onPull({d}) = done", .{buffer.len}); + + return .{ .done = {} }; + } + } + + this.pending_value.set(this.parent().globalThis, array); + this.pending_view = buffer; + + log("onPull({d}) = pending", .{buffer.len}); + + return .{ .pending = &this.pending }; +} + +pub fn drain(this: *FileReader) bun.ByteList { + if (this.buffered.items.len > 0) { + const out = bun.ByteList.init(this.buffered.items); + this.buffered = .{}; + if (comptime Environment.allow_assert) { + bun.assert(this.reader.buffer().items.ptr != out.ptr); + } + return out; + } + + if (this.reader.hasPendingRead()) { + return .{}; + } + + const out = this.reader.buffer().*; + this.reader.buffer().* = std.ArrayList(u8).init(bun.default_allocator); + return bun.ByteList.fromList(out); +} + +pub fn setRefOrUnref(this: *FileReader, enable: bool) void { + if (this.done) return; + this.reader.updateRef(enable); +} + +fn consumeReaderBuffer(this: *FileReader) void { + if (this.buffered.capacity == 0) { + this.buffered = this.reader.buffer().moveToUnmanaged(); + } +} + +pub fn onReaderDone(this: *FileReader) void { + log("onReaderDone()", .{}); + if 
(!this.isPulling()) { + this.consumeReaderBuffer(); + if (this.pending.state == .pending) { + if (this.buffered.items.len > 0) { + this.pending.result = .{ .owned_and_done = bun.ByteList.fromList(this.buffered) }; + } else { + this.pending.result = .{ .done = {} }; + } + this.buffered = .{}; + this.pending.run(); + } else if (this.buffered.items.len > 0) { + const this_value = this.parent().this_jsvalue; + const globalThis = this.parent().globalThis; + if (this_value != .zero) { + if (Source.js.onDrainCallbackGetCached(this_value)) |cb| { + const buffered = this.buffered; + this.buffered = .{}; + this.parent().incrementCount(); + defer _ = this.parent().decrementCount(); + this.eventLoop().js.runCallback( + cb, + globalThis, + .undefined, + &.{ + JSC.ArrayBuffer.fromBytes( + buffered.items, + .Uint8Array, + ).toJS( + globalThis, + null, + ), + }, + ); + } + } + } + } + + this.parent().onClose(); + if (this.waiting_for_onReaderDone) { + this.waiting_for_onReaderDone = false; + _ = this.parent().decrementCount(); + } +} + +pub fn onReaderError(this: *FileReader, err: bun.sys.Error) void { + this.consumeReaderBuffer(); + + this.pending.result = .{ .err = .{ .Error = err } }; + this.pending.run(); +} + +pub fn setRawMode(this: *FileReader, flag: bool) bun.sys.Maybe(void) { + if (!Environment.isWindows) { + @panic("FileReader.setRawMode must not be called on " ++ comptime Environment.os.displayString()); + } + return this.reader.setRawMode(flag); +} + +pub fn memoryCost(this: *const FileReader) usize { + // ReadableStreamSource covers @sizeOf(FileReader) + return this.reader.memoryCost() + this.buffered.capacity; +} + +pub const Source = ReadableStream.NewSource( + @This(), + "File", + onStart, + onPull, + onCancel, + deinit, + setRefOrUnref, + drain, + memoryCost, + null, +); + +const std = @import("std"); +const bun = @import("bun"); +const Output = bun.Output; +const Environment = bun.Environment; +const JSC = bun.jsc; +const webcore = bun.webcore; +const streams = webcore.streams; +const Blob = webcore.Blob; +const ReadableStream = webcore.ReadableStream; diff --git a/src/bun.js/webcore/FileSink.zig b/src/bun.js/webcore/FileSink.zig new file mode 100644 index 0000000000..5310ff6887 --- /dev/null +++ b/src/bun.js/webcore/FileSink.zig @@ -0,0 +1,670 @@ +const FileSink = @This(); + +ref_count: RefCount, +writer: IOWriter = .{}, +event_loop_handle: JSC.EventLoopHandle, +written: usize = 0, +pending: streams.Result.Writable.Pending = .{ + .result = .{ .done = {} }, +}, +signal: streams.Signal = .{}, +done: bool = false, +started: bool = false, +must_be_kept_alive_until_eof: bool = false, + +// TODO: these fields are duplicated on writer() +// we should not duplicate these fields... 
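+//
+// Descriptive note (added): `pollable` and `nonblocking` mirror what setup()
+// detects via fstat()/fcntl(), and `force_sync` is set for TTYs (and via
+// Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio) to keep stdout/stderr
+// writes blocking and ordered.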
+pollable: bool = false, +nonblocking: bool = false, +force_sync: bool = false, + +is_socket: bool = false, +fd: bun.FileDescriptor = bun.invalid_fd, + +auto_flusher: webcore.AutoFlusher = .{}, +run_pending_later: FlushPendingTask = .{}, + +const log = Output.scoped(.FileSink, false); + +pub const RefCount = bun.ptr.RefCount(FileSink, "ref_count", deinit, .{}); +pub const ref = RefCount.ref; +pub const deref = RefCount.deref; + +pub const IOWriter = bun.io.StreamingWriter(@This(), opaque { + pub const onClose = FileSink.onClose; + pub const onWritable = FileSink.onReady; + pub const onError = FileSink.onError; + pub const onWrite = FileSink.onWrite; +}); +pub const Poll = IOWriter; + +pub const Options = struct { + chunk_size: Blob.SizeType = 1024, + input_path: webcore.PathOrFileDescriptor, + truncate: bool = true, + close: bool = false, + mode: bun.Mode = 0o664, + + pub fn flags(this: *const Options) i32 { + _ = this; + + return bun.O.NONBLOCK | bun.O.CLOEXEC | bun.O.CREAT | bun.O.WRONLY; + } +}; + +pub fn memoryCost(this: *const FileSink) usize { + // Since this is a JSSink, the NewJSSink function does @sizeOf(JSSink) which includes @sizeOf(FileSink). + return this.writer.memoryCost(); +} + +fn Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio(_: *JSC.JSGlobalObject, jsvalue: JSC.JSValue) callconv(.C) void { + var this: *FileSink = @alignCast(@ptrCast(JSSink.fromJS(jsvalue) orelse return)); + this.force_sync = true; + if (comptime !Environment.isWindows) { + this.writer.force_sync = true; + if (this.fd != bun.invalid_fd) { + _ = bun.sys.updateNonblocking(this.fd, false); + } + } +} + +comptime { + @export(&Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio, .{ .name = "Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio" }); +} + +pub fn onAttachedProcessExit(this: *FileSink) void { + log("onAttachedProcessExit()", .{}); + this.done = true; + this.writer.close(); + + this.pending.result = .{ .err = .fromCode(.PIPE, .write) }; + + this.runPending(); + + if (this.must_be_kept_alive_until_eof) { + this.must_be_kept_alive_until_eof = false; + this.deref(); + } +} + +fn runPending(this: *FileSink) void { + this.ref(); + defer this.deref(); + + this.run_pending_later.has = false; + const l = this.eventLoop(); + l.enter(); + defer l.exit(); + this.pending.run(); +} + +pub fn onWrite(this: *FileSink, amount: usize, status: bun.io.WriteStatus) void { + log("onWrite({d}, {any})", .{ amount, status }); + + this.written += amount; + + // TODO: on windows done means ended (no pending data on the buffer) on unix we can still have pending data on the buffer + // we should unify the behaviors to simplify this + const has_pending_data = this.writer.hasPendingData(); + // Only keep the event loop ref'd while there's a pending write in progress. + // If there's no pending write, no need to keep the event loop ref'd. + this.writer.updateRef(this.eventLoop(), has_pending_data); + + if (has_pending_data) { + if (this.event_loop_handle.bunVM()) |vm| { + if (!vm.is_inside_deferred_task_queue) { + webcore.AutoFlusher.registerDeferredMicrotaskWithType(@This(), this, vm); + } + } + } + + // if we are not done yet and has pending data we just wait so we do not runPending twice + if (status == .pending and has_pending_data) { + if (this.pending.state == .pending) { + this.pending.consumed = @truncate(amount); + } + return; + } + + if (this.pending.state == .pending) { + this.pending.consumed = @truncate(amount); + + // when "done" is true, we will never receive more data. 
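+        // In that case the pending promise resolves as owned_and_done; otherwise
+        // it resolves as owned so the writer can keep accepting chunks.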
+ if (this.done or status == .end_of_file) { + this.pending.result = .{ .owned_and_done = this.pending.consumed }; + } else { + this.pending.result = .{ .owned = this.pending.consumed }; + } + + this.runPending(); + + // this.done == true means ended was called + const ended_and_done = this.done and status == .end_of_file; + + if (this.done and status == .drained) { + // if we call end/endFromJS and we have some pending returned from .flush() we should call writer.end() + this.writer.end(); + } else if (ended_and_done and !has_pending_data) { + this.writer.close(); + } + } + + if (status == .end_of_file) { + if (this.must_be_kept_alive_until_eof) { + this.must_be_kept_alive_until_eof = false; + this.deref(); + } + this.signal.close(null); + } +} + +pub fn onError(this: *FileSink, err: bun.sys.Error) void { + log("onError({any})", .{err}); + if (this.pending.state == .pending) { + this.pending.result = .{ .err = err }; + if (this.eventLoop().bunVM()) |vm| { + if (vm.is_inside_deferred_task_queue) { + this.runPendingLater(); + return; + } + } + + this.runPending(); + } +} + +pub fn onReady(this: *FileSink) void { + log("onReady()", .{}); + + this.signal.ready(null, null); +} + +pub fn onClose(this: *FileSink) void { + log("onClose()", .{}); + this.signal.close(null); +} + +pub fn createWithPipe( + event_loop_: anytype, + pipe: *uv.Pipe, +) *FileSink { + if (Environment.isPosix) { + @compileError("FileSink.createWithPipe is only available on Windows"); + } + + const evtloop = switch (@TypeOf(event_loop_)) { + JSC.EventLoopHandle => event_loop_, + else => JSC.EventLoopHandle.init(event_loop_), + }; + + var this = bun.new(FileSink, .{ + .ref_count = .init(), + .event_loop_handle = JSC.EventLoopHandle.init(evtloop), + .fd = pipe.fd(), + }); + this.writer.setPipe(pipe); + this.writer.setParent(this); + return this; +} + +pub fn create( + event_loop_: anytype, + fd: bun.FileDescriptor, +) *FileSink { + const evtloop = switch (@TypeOf(event_loop_)) { + JSC.EventLoopHandle => event_loop_, + else => JSC.EventLoopHandle.init(event_loop_), + }; + var this = bun.new(FileSink, .{ + .ref_count = .init(), + .event_loop_handle = JSC.EventLoopHandle.init(evtloop), + .fd = fd, + }); + this.writer.setParent(this); + return this; +} + +pub fn setup(this: *FileSink, options: *const FileSink.Options) JSC.Maybe(void) { + // TODO: this should be concurrent. + var isatty = false; + var is_nonblocking = false; + const fd = switch (switch (options.input_path) { + .path => |path| brk: { + is_nonblocking = true; + break :brk bun.sys.openA(path.slice(), options.flags(), options.mode); + }, + .fd => |fd_| brk: { + const duped = bun.sys.dupWithFlags(fd_, 0); + + break :brk duped; + }, + }) { + .err => |err| return .{ .err = err }, + .result => |fd| fd, + }; + + if (comptime Environment.isPosix) { + switch (bun.sys.fstat(fd)) { + .err => |err| { + fd.close(); + return .{ .err = err }; + }, + .result => |stat| { + this.pollable = bun.sys.isPollable(stat.mode); + if (!this.pollable) { + isatty = std.posix.isatty(fd.native()); + } + + if (isatty) { + this.pollable = true; + } + + this.fd = fd; + this.is_socket = std.posix.S.ISSOCK(stat.mode); + + if (this.force_sync or isatty) { + // Prevents interleaved or dropped stdout/stderr output for terminals. + // As noted in the following reference, local TTYs tend to be quite fast and + // this behavior has become expected due historical functionality on OS X, + // even though it was originally intended to change in v1.0.2 (Libuv 1.2.1). 
+ // Ref: https://github.com/nodejs/node/pull/1771#issuecomment-119351671 + _ = bun.sys.updateNonblocking(fd, false); + is_nonblocking = false; + this.force_sync = true; + this.writer.force_sync = true; + } else if (!is_nonblocking) { + const flags = switch (bun.sys.getFcntlFlags(fd)) { + .result => |flags| flags, + .err => |err| { + fd.close(); + return .{ .err = err }; + }, + }; + is_nonblocking = (flags & @as(@TypeOf(flags), bun.O.NONBLOCK)) != 0; + + if (!is_nonblocking) { + if (bun.sys.setNonblocking(fd) == .result) { + is_nonblocking = true; + } + } + } + + this.nonblocking = is_nonblocking and this.pollable; + }, + } + } else if (comptime Environment.isWindows) { + this.pollable = (bun.windows.GetFileType(fd.cast()) & bun.windows.FILE_TYPE_PIPE) != 0 and !this.force_sync; + this.fd = fd; + } else { + @compileError("TODO: implement for this platform"); + } + + if (comptime Environment.isWindows) { + if (this.force_sync) { + switch (this.writer.startSync( + fd, + this.pollable, + )) { + .err => |err| { + fd.close(); + return .{ .err = err }; + }, + .result => { + this.writer.updateRef(this.eventLoop(), false); + }, + } + return .{ .result = {} }; + } + } + + switch (this.writer.start( + fd, + this.pollable, + )) { + .err => |err| { + fd.close(); + return .{ .err = err }; + }, + .result => { + // Only keep the event loop ref'd while there's a pending write in progress. + // If there's no pending write, no need to keep the event loop ref'd. + this.writer.updateRef(this.eventLoop(), false); + if (comptime Environment.isPosix) { + if (this.nonblocking) { + this.writer.getPoll().?.flags.insert(.nonblocking); + } + + if (this.is_socket) { + this.writer.getPoll().?.flags.insert(.socket); + } else if (this.pollable) { + this.writer.getPoll().?.flags.insert(.fifo); + } + } + }, + } + + return .{ .result = {} }; +} + +pub fn loop(this: *FileSink) *bun.Async.Loop { + return this.event_loop_handle.loop(); +} + +pub fn eventLoop(this: *FileSink) JSC.EventLoopHandle { + return this.event_loop_handle; +} + +pub fn connect(this: *FileSink, signal: streams.Signal) void { + this.signal = signal; +} + +pub fn start(this: *FileSink, stream_start: streams.Start) JSC.Maybe(void) { + switch (stream_start) { + .FileSink => |*file| { + switch (this.setup(file)) { + .err => |err| { + return .{ .err = err }; + }, + .result => {}, + } + }, + else => {}, + } + + this.done = false; + this.started = true; + this.signal.start(); + return .{ .result = {} }; +} + +pub fn runPendingLater(this: *FileSink) void { + if (this.run_pending_later.has) { + return; + } + this.run_pending_later.has = true; + const event_loop = this.eventLoop(); + if (event_loop == .js) { + this.ref(); + event_loop.js.enqueueTask(JSC.Task.init(&this.run_pending_later)); + } +} + +pub fn onAutoFlush(this: *FileSink) bool { + if (this.done or !this.writer.hasPendingData()) { + this.updateRef(false); + this.auto_flusher.registered = false; + return false; + } + + this.ref(); + defer this.deref(); + + const amount_buffered = this.writer.outgoing.size(); + + switch (this.writer.flush()) { + .err, .done => { + this.updateRef(false); + this.runPendingLater(); + }, + .wrote => |amount_drained| { + if (amount_drained == amount_buffered) { + this.updateRef(false); + this.runPendingLater(); + } + }, + else => { + return true; + }, + } + + const is_registered = !this.writer.hasPendingData(); + this.auto_flusher.registered = is_registered; + return is_registered; +} + +pub fn flush(_: *FileSink) JSC.Maybe(void) { + return .{ .result = {} }; +} + +pub fn 
flushFromJS(this: *FileSink, globalThis: *JSGlobalObject, wait: bool) JSC.Maybe(JSValue) { + _ = wait; + + if (this.pending.state == .pending) { + return .{ .result = this.pending.future.promise.strong.value() }; + } + + if (this.done) { + return .{ .result = .undefined }; + } + + const rc = this.writer.flush(); + switch (rc) { + .done => |written| { + this.written += @truncate(written); + }, + .pending => |written| { + this.written += @truncate(written); + }, + .wrote => |written| { + this.written += @truncate(written); + }, + .err => |err| { + return .{ .err = err }; + }, + } + return switch (this.toResult(rc)) { + .err => unreachable, + else => |result| .{ .result = result.toJS(globalThis) }, + }; +} + +pub fn finalize(this: *FileSink) void { + this.pending.deinit(); + this.deref(); +} + +pub fn init(fd: bun.FileDescriptor, event_loop_handle: anytype) *FileSink { + var this = bun.new(FileSink, .{ + .ref_count = .init(), + .writer = .{}, + .fd = fd, + .event_loop_handle = JSC.EventLoopHandle.init(event_loop_handle), + }); + this.writer.setParent(this); + + return this; +} + +pub fn construct(this: *FileSink, _: std.mem.Allocator) void { + this.* = FileSink{ + .ref_count = .init(), + .event_loop_handle = JSC.EventLoopHandle.init(JSC.VirtualMachine.get().eventLoop()), + }; +} + +pub fn write(this: *@This(), data: streams.Result) streams.Result.Writable { + if (this.done) { + return .{ .done = {} }; + } + + return this.toResult(this.writer.write(data.slice())); +} +pub const writeBytes = write; +pub fn writeLatin1(this: *@This(), data: streams.Result) streams.Result.Writable { + if (this.done) { + return .{ .done = {} }; + } + + return this.toResult(this.writer.writeLatin1(data.slice())); +} +pub fn writeUTF16(this: *@This(), data: streams.Result) streams.Result.Writable { + if (this.done) { + return .{ .done = {} }; + } + + return this.toResult(this.writer.writeUTF16(data.slice16())); +} + +pub fn end(this: *FileSink, _: ?bun.sys.Error) JSC.Maybe(void) { + if (this.done) { + return .{ .result = {} }; + } + + switch (this.writer.flush()) { + .done => |written| { + this.written += @truncate(written); + this.writer.end(); + return .{ .result = {} }; + }, + .err => |e| { + this.writer.close(); + return .{ .err = e }; + }, + .pending => |written| { + this.written += @truncate(written); + if (!this.must_be_kept_alive_until_eof) { + this.must_be_kept_alive_until_eof = true; + this.ref(); + } + this.done = true; + return .{ .result = {} }; + }, + .wrote => |written| { + this.written += @truncate(written); + this.writer.end(); + return .{ .result = {} }; + }, + } +} + +fn deinit(this: *FileSink) void { + this.pending.deinit(); + this.writer.deinit(); + if (this.event_loop_handle.globalObject()) |global| { + webcore.AutoFlusher.unregisterDeferredMicrotaskWithType(@This(), this, global.bunVM()); + } + bun.destroy(this); +} + +pub fn toJS(this: *FileSink, globalThis: *JSGlobalObject) JSValue { + return JSSink.createObject(globalThis, this, 0); +} + +pub fn toJSWithDestructor(this: *FileSink, globalThis: *JSGlobalObject, destructor: ?Sink.DestructorPtr) JSValue { + return JSSink.createObject(globalThis, this, if (destructor) |dest| @intFromPtr(dest.ptr()) else 0); +} + +pub fn endFromJS(this: *FileSink, globalThis: *JSGlobalObject) JSC.Maybe(JSValue) { + if (this.done) { + if (this.pending.state == .pending) { + return .{ .result = this.pending.future.promise.strong.value() }; + } + + return .{ .result = JSValue.jsNumber(this.written) }; + } + + switch (this.writer.flush()) { + .done => |written| { + 
this.updateRef(false); + this.writer.end(); + return .{ .result = JSValue.jsNumber(written) }; + }, + .err => |err| { + this.writer.close(); + return .{ .err = err }; + }, + .pending => |pending_written| { + this.written += @truncate(pending_written); + if (!this.must_be_kept_alive_until_eof) { + this.must_be_kept_alive_until_eof = true; + this.ref(); + } + this.done = true; + this.pending.result = .{ .owned = @truncate(pending_written) }; + return .{ .result = this.pending.promise(globalThis).asValue(globalThis) }; + }, + .wrote => |written| { + this.writer.end(); + return .{ .result = JSValue.jsNumber(written) }; + }, + } +} + +pub fn sink(this: *FileSink) Sink { + return Sink.init(this); +} + +pub fn updateRef(this: *FileSink, value: bool) void { + if (value) { + this.writer.enableKeepingProcessAlive(this.event_loop_handle); + } else { + this.writer.disableKeepingProcessAlive(this.event_loop_handle); + } +} + +pub const JSSink = Sink.JSSink(@This(), "FileSink"); + +fn getFd(this: *const @This()) i32 { + if (Environment.isWindows) { + return switch (this.fd.decodeWindows()) { + .windows => -1, // TODO: + .uv => |num| num, + }; + } + return this.fd.cast(); +} + +fn toResult(this: *FileSink, write_result: bun.io.WriteResult) streams.Result.Writable { + switch (write_result) { + .done => |amt| { + if (amt > 0) + return .{ .owned_and_done = @truncate(amt) }; + + return .{ .done = {} }; + }, + .wrote => |amt| { + if (amt > 0) + return .{ .owned = @truncate(amt) }; + + return .{ .temporary = @truncate(amt) }; + }, + .err => |err| { + return .{ .err = err }; + }, + .pending => |pending_written| { + if (!this.must_be_kept_alive_until_eof) { + this.must_be_kept_alive_until_eof = true; + this.ref(); + } + this.pending.consumed += @truncate(pending_written); + this.pending.result = .{ .owned = @truncate(pending_written) }; + return .{ .pending = &this.pending }; + }, + } +} + +pub const FlushPendingTask = struct { + has: bool = false, + + pub fn runFromJSThread(flush_pending: *FlushPendingTask) void { + const had = flush_pending.has; + flush_pending.has = false; + const this: *FileSink = @alignCast(@fieldParentPtr("run_pending_later", flush_pending)); + defer this.deref(); + if (had) + this.runPending(); + } +}; + +const std = @import("std"); +const bun = @import("bun"); +const uv = bun.windows.libuv; +const Output = bun.Output; +const Environment = bun.Environment; +const JSC = bun.jsc; +const webcore = bun.webcore; +const Blob = webcore.Blob; +const Sink = webcore.Sink; +const streams = webcore.streams; +const JSGlobalObject = JSC.JSGlobalObject; +const JSValue = JSC.JSValue; diff --git a/src/bun.js/webcore/ObjectURLRegistry.zig b/src/bun.js/webcore/ObjectURLRegistry.zig index 446915c70a..19c57ce2da 100644 --- a/src/bun.js/webcore/ObjectURLRegistry.zig +++ b/src/bun.js/webcore/ObjectURLRegistry.zig @@ -6,19 +6,19 @@ const assert = bun.assert; const ObjectURLRegistry = @This(); lock: bun.Mutex = .{}, -map: std.AutoHashMap(UUID, *RegistryEntry) = std.AutoHashMap(UUID, *RegistryEntry).init(bun.default_allocator), +map: std.AutoHashMap(UUID, *Entry) = std.AutoHashMap(UUID, *Entry).init(bun.default_allocator), -pub const RegistryEntry = struct { +pub const Entry = struct { blob: JSC.WebCore.Blob, pub const new = bun.TrivialNew(@This()); - pub fn init(blob: *const JSC.WebCore.Blob) *RegistryEntry { - return RegistryEntry.new(.{ + pub fn init(blob: *const JSC.WebCore.Blob) *Entry { + return Entry.new(.{ .blob = blob.dupeWithContentType(true), }); } - pub fn deinit(this: *RegistryEntry) void { + pub fn 
deinit(this: *Entry) void { this.blob.deinit(); bun.destroy(this); } @@ -26,7 +26,7 @@ pub const RegistryEntry = struct { pub fn register(this: *ObjectURLRegistry, vm: *JSC.VirtualMachine, blob: *const JSC.WebCore.Blob) UUID { const uuid = vm.rareData().nextUUID(); - const entry = RegistryEntry.init(blob); + const entry = Entry.init(blob); this.lock.lock(); defer this.lock.unlock(); @@ -90,7 +90,7 @@ pub fn has(this: *ObjectURLRegistry, pathname: []const u8) bool { } comptime { - const Bun__createObjectURL = JSC.toJSHostFunction(Bun__createObjectURL_); + const Bun__createObjectURL = JSC.toJSHostFn(Bun__createObjectURL_); @export(&Bun__createObjectURL, .{ .name = "Bun__createObjectURL" }); } fn Bun__createObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { @@ -108,7 +108,7 @@ fn Bun__createObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.Call } comptime { - const Bun__revokeObjectURL = JSC.toJSHostFunction(Bun__revokeObjectURL_); + const Bun__revokeObjectURL = JSC.toJSHostFn(Bun__revokeObjectURL_); @export(&Bun__revokeObjectURL, .{ .name = "Bun__revokeObjectURL" }); } fn Bun__revokeObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { @@ -137,7 +137,7 @@ fn Bun__revokeObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.Call } comptime { - const jsFunctionResolveObjectURL = JSC.toJSHostFunction(jsFunctionResolveObjectURL_); + const jsFunctionResolveObjectURL = JSC.toJSHostFn(jsFunctionResolveObjectURL_); @export(&jsFunctionResolveObjectURL, .{ .name = "jsFunctionResolveObjectURL" }); } fn jsFunctionResolveObjectURL_(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { diff --git a/src/bun.js/webcore/ReadableStream.zig b/src/bun.js/webcore/ReadableStream.zig new file mode 100644 index 0000000000..96af7e0290 --- /dev/null +++ b/src/bun.js/webcore/ReadableStream.zig @@ -0,0 +1,843 @@ +const ReadableStream = @This(); + +value: JSValue, +ptr: Source, + +pub const Strong = struct { + held: JSC.Strong = .empty, + + pub fn has(this: *Strong) bool { + return this.held.has(); + } + + pub fn isDisturbed(this: *const Strong, global: *JSC.JSGlobalObject) bool { + if (this.get(global)) |stream| { + return stream.isDisturbed(global); + } + + return false; + } + + pub fn init(this: ReadableStream, global: *JSGlobalObject) Strong { + return .{ + .held = JSC.Strong.create(this.value, global), + }; + } + + pub fn get(this: *const Strong, global: *JSC.JSGlobalObject) ?ReadableStream { + if (this.held.get()) |value| { + return ReadableStream.fromJS(value, global); + } + return null; + } + + pub fn deinit(this: *Strong) void { + // if (this.held.get()) |val| { + // ReadableStream__detach(val, this.held.globalThis.?); + // } + this.held.deinit(); + } + + pub fn tee(this: *Strong, global: *JSGlobalObject) ?ReadableStream { + if (this.get(global)) |stream| { + const first, const second = stream.tee(global) orelse return null; + this.held.set(global, first.value); + return second; + } + return null; + } +}; + +extern fn ReadableStream__tee(stream: JSValue, globalThis: *JSGlobalObject, out1: *JSC.JSValue, out2: *JSC.JSValue) bool; +pub fn tee(this: *const ReadableStream, globalThis: *JSGlobalObject) ?struct { ReadableStream, ReadableStream } { + var out1: JSC.JSValue = .zero; + var out2: JSC.JSValue = .zero; + if (!ReadableStream__tee(this.value, globalThis, &out1, &out2)) { + return null; + } + const out_stream2 = ReadableStream.fromJS(out2, globalThis) orelse return 
null; + const out_stream1 = ReadableStream.fromJS(out1, globalThis) orelse return null; + return .{ out_stream1, out_stream2 }; +} + +pub fn toJS(this: *const ReadableStream) JSValue { + return this.value; +} + +pub fn reloadTag(this: *ReadableStream, globalThis: *JSC.JSGlobalObject) void { + if (ReadableStream.fromJS(this.value, globalThis)) |stream| { + this.* = stream; + } else { + this.* = .{ .ptr = .{ .Invalid = {} }, .value = .zero }; + } +} + +pub fn toAnyBlob( + stream: *ReadableStream, + globalThis: *JSC.JSGlobalObject, +) ?Blob.Any { + if (stream.isDisturbed(globalThis)) { + return null; + } + + stream.reloadTag(globalThis); + + switch (stream.ptr) { + .Blob => |blobby| { + if (blobby.toAnyBlob(globalThis)) |blob| { + stream.done(globalThis); + return blob; + } + }, + .File => |blobby| { + if (blobby.lazy == .blob) { + var blob = Blob.initWithStore(blobby.lazy.blob, globalThis); + blob.store.?.ref(); + // it should be lazy, file shouldn't have opened yet. + bun.assert(!blobby.started); + stream.done(globalThis); + return .{ .Blob = blob }; + } + }, + .Bytes => |bytes| { + // If we've received the complete body by the time this function is called + // we can avoid streaming it and convert it to a Blob + if (bytes.toAnyBlob()) |blob| { + stream.done(globalThis); + return blob; + } + + return null; + }, + else => {}, + } + + return null; +} + +pub fn done(this: *const ReadableStream, globalThis: *JSGlobalObject) void { + JSC.markBinding(@src()); + // done is called when we are done consuming the stream + // cancel actually mark the stream source as done + // this will resolve any pending promises to done: true + switch (this.ptr) { + .Blob => |source| { + source.parent().cancel(); + }, + .File => |source| { + source.parent().cancel(); + }, + .Bytes => |source| { + source.parent().cancel(); + }, + else => {}, + } + this.detachIfPossible(globalThis); +} + +pub fn cancel(this: *const ReadableStream, globalThis: *JSGlobalObject) void { + JSC.markBinding(@src()); + // cancel the stream + ReadableStream__cancel(this.value, globalThis); + // mark the stream source as done + this.done(globalThis); +} + +pub fn abort(this: *const ReadableStream, globalThis: *JSGlobalObject) void { + JSC.markBinding(@src()); + // for now we are just calling cancel should be fine + this.cancel(globalThis); +} + +pub fn forceDetach(this: *const ReadableStream, globalObject: *JSGlobalObject) void { + ReadableStream__detach(this.value, globalObject); +} + +/// Decrement Source ref count and detach the underlying stream if ref count is zero +/// be careful, this can invalidate the stream do not call this multiple times +/// this is meant to be called only once when we are done consuming the stream or from the ReadableStream.Strong.deinit +pub fn detachIfPossible(_: *const ReadableStream, _: *JSGlobalObject) void { + JSC.markBinding(@src()); +} + +pub const Tag = enum(i32) { + Invalid = -1, + + /// ReadableStreamDefaultController or ReadableByteStreamController + JavaScript = 0, + + /// ReadableByteStreamController + /// but with a BlobLoader + /// we can skip the BlobLoader and just use the underlying Blob + Blob = 1, + + /// ReadableByteStreamController + /// but with a FileLoader + /// we can skip the FileLoader and just use the underlying File + File = 2, + + /// This is a direct readable stream + /// That means we can turn it into whatever we want + Direct = 3, + + Bytes = 4, +}; + +pub const Source = union(Tag) { + Invalid: void, + /// ReadableStreamDefaultController or ReadableByteStreamController + 
JavaScript: void, + /// ReadableByteStreamController + /// but with a BlobLoader + /// we can skip the BlobLoader and just use the underlying Blob + Blob: *webcore.ByteBlobLoader, + + /// ReadableByteStreamController + /// but with a FileLoader + /// we can skip the FileLoader and just use the underlying File + File: *webcore.FileReader, + + /// This is a direct readable stream + /// That means we can turn it into whatever we want + Direct: void, + + Bytes: *webcore.ByteStream, +}; + +extern fn ReadableStreamTag__tagged(globalObject: *JSGlobalObject, possibleReadableStream: *JSValue, ptr: *?*anyopaque) Tag; +extern fn ReadableStream__isDisturbed(possibleReadableStream: JSValue, globalObject: *JSGlobalObject) bool; +extern fn ReadableStream__isLocked(possibleReadableStream: JSValue, globalObject: *JSGlobalObject) bool; +extern fn ReadableStream__empty(*JSGlobalObject) JSC.JSValue; +extern fn ReadableStream__used(*JSGlobalObject) JSC.JSValue; +extern fn ReadableStream__cancel(stream: JSValue, *JSGlobalObject) void; +extern fn ReadableStream__abort(stream: JSValue, *JSGlobalObject) void; +extern fn ReadableStream__detach(stream: JSValue, *JSGlobalObject) void; +extern fn ReadableStream__fromBlob( + *JSGlobalObject, + store: *anyopaque, + offset: usize, + length: usize, +) JSC.JSValue; + +pub fn isDisturbed(this: *const ReadableStream, globalObject: *JSGlobalObject) bool { + JSC.markBinding(@src()); + return isDisturbedValue(this.value, globalObject); +} + +pub fn isDisturbedValue(value: JSC.JSValue, globalObject: *JSGlobalObject) bool { + JSC.markBinding(@src()); + return ReadableStream__isDisturbed(value, globalObject); +} + +pub fn isLocked(this: *const ReadableStream, globalObject: *JSGlobalObject) bool { + JSC.markBinding(@src()); + return ReadableStream__isLocked(this.value, globalObject); +} + +pub fn fromJS(value: JSValue, globalThis: *JSGlobalObject) ?ReadableStream { + JSC.markBinding(@src()); + value.ensureStillAlive(); + var out = value; + + var ptr: ?*anyopaque = null; + return switch (ReadableStreamTag__tagged(globalThis, &out, &ptr)) { + .JavaScript => ReadableStream{ + .value = out, + .ptr = .{ + .JavaScript = {}, + }, + }, + .Blob => ReadableStream{ + .value = out, + .ptr = .{ + .Blob = @ptrCast(@alignCast(ptr.?)), + }, + }, + .File => ReadableStream{ + .value = out, + .ptr = .{ + .File = @ptrCast(@alignCast(ptr.?)), + }, + }, + + .Bytes => ReadableStream{ + .value = out, + .ptr = .{ + .Bytes = @ptrCast(@alignCast(ptr.?)), + }, + }, + + // .HTTPRequest => ReadableStream{ + // .value = out, + // .ptr = .{ + // .HTTPRequest = ptr.asPtr(HTTPRequest), + // }, + // }, + // .HTTPSRequest => ReadableStream{ + // .value = out, + // .ptr = .{ + // .HTTPSRequest = ptr.asPtr(HTTPSRequest), + // }, + // }, + else => null, + }; +} + +extern fn ZigGlobalObject__createNativeReadableStream(*JSGlobalObject, nativePtr: JSValue) JSValue; + +pub fn fromNative(globalThis: *JSGlobalObject, native: JSC.JSValue) JSC.JSValue { + JSC.markBinding(@src()); + return ZigGlobalObject__createNativeReadableStream(globalThis, native); +} + +pub fn fromBlob(globalThis: *JSGlobalObject, blob: *const Blob, recommended_chunk_size: Blob.SizeType) JSC.JSValue { + JSC.markBinding(@src()); + var store = blob.store orelse { + return ReadableStream.empty(globalThis); + }; + switch (store.data) { + .bytes => { + var reader = webcore.ByteBlobLoader.Source.new( + .{ + .globalThis = globalThis, + .context = undefined, + }, + ); + reader.context.setup(blob, recommended_chunk_size); + return 
reader.toReadableStream(globalThis); + }, + .file => { + var reader = webcore.FileReader.Source.new(.{ + .globalThis = globalThis, + .context = .{ + .event_loop = JSC.EventLoopHandle.init(globalThis.bunVM().eventLoop()), + .start_offset = blob.offset, + .max_size = if (blob.size != Blob.max_size) blob.size else null, + + .lazy = .{ + .blob = store, + }, + }, + }); + store.ref(); + + return reader.toReadableStream(globalThis); + }, + .s3 => |*s3| { + const credentials = s3.getCredentials(); + const path = s3.path(); + const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy_url = if (proxy) |p| p.href else null; + + return bun.S3.readableStream(credentials, path, blob.offset, if (blob.size != Blob.max_size) blob.size else null, proxy_url, globalThis); + }, + } +} + +pub fn fromFileBlobWithOffset( + globalThis: *JSGlobalObject, + blob: *const Blob, + offset: usize, +) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + var store = blob.store orelse { + return ReadableStream.empty(globalThis); + }; + switch (store.data) { + .file => { + var reader = webcore.FileReader.Source.new(.{ + .globalThis = globalThis, + .context = .{ + .event_loop = JSC.EventLoopHandle.init(globalThis.bunVM().eventLoop()), + .start_offset = offset, + .lazy = .{ + .blob = store, + }, + }, + }); + store.ref(); + + return reader.toReadableStream(globalThis); + }, + else => { + return globalThis.throw("Expected FileBlob", .{}); + }, + } +} + +pub fn fromPipe( + globalThis: *JSGlobalObject, + parent: anytype, + buffered_reader: anytype, +) JSC.JSValue { + _ = parent; // autofix + JSC.markBinding(@src()); + var source = webcore.FileReader.Source.new(.{ + .globalThis = globalThis, + .context = .{ + .event_loop = JSC.EventLoopHandle.init(globalThis.bunVM().eventLoop()), + }, + }); + source.context.reader.from(buffered_reader, &source.context); + + return source.toReadableStream(globalThis); +} + +pub fn empty(globalThis: *JSGlobalObject) JSC.JSValue { + JSC.markBinding(@src()); + + return ReadableStream__empty(globalThis); +} + +pub fn used(globalThis: *JSGlobalObject) JSC.JSValue { + JSC.markBinding(@src()); + + return ReadableStream__used(globalThis); +} + +pub const StreamTag = enum(usize) { + invalid = 0, + _, + + pub fn init(filedes: bun.FileDescriptor) StreamTag { + var bytes = [8]u8{ 1, 0, 0, 0, 0, 0, 0, 0 }; + const filedes_ = @as([8]u8, @bitCast(@as(usize, @as(u56, @truncate(@as(usize, @intCast(filedes))))))); + bytes[1..8].* = filedes_[0..7].*; + + return @as(StreamTag, @enumFromInt(@as(u64, @bitCast(bytes)))); + } + + pub fn fd(this: StreamTag) bun.FileDescriptor { + var bytes = @as([8]u8, @bitCast(@intFromEnum(this))); + if (bytes[0] != 1) { + return bun.invalid_fd; + } + const out: u64 = 0; + @as([8]u8, @bitCast(out))[0..7].* = bytes[1..8].*; + return @as(bun.FileDescriptor, @intCast(out)); + } +}; + +pub fn NewSource( + comptime Context: type, + comptime name_: []const u8, + comptime onStart: anytype, + comptime onPull: anytype, + comptime onCancel: fn (this: *Context) void, + comptime deinit_fn: fn (this: *Context) void, + comptime setRefUnrefFn: ?fn (this: *Context, enable: bool) void, + comptime drainInternalBuffer: ?fn (this: *Context) bun.ByteList, + comptime memoryCostFn: ?fn (this: *const Context) usize, + comptime toBufferedValue: ?fn (this: *Context, globalThis: *JSC.JSGlobalObject, action: streams.BufferAction.Tag) bun.JSError!JSC.JSValue, +) type { + return struct { + context: Context, + cancelled: bool = false, + ref_count: u32 = 1, + pending_err: ?Syscall.Error = 
null, + close_handler: ?*const fn (?*anyopaque) void = null, + close_ctx: ?*anyopaque = null, + close_jsvalue: JSC.Strong = .empty, + globalThis: *JSGlobalObject = undefined, + this_jsvalue: JSC.JSValue = .zero, + is_closed: bool = false, + + const This = @This(); + const ReadableStreamSourceType = @This(); + + pub const new = bun.TrivialNew(@This()); + pub const deinit = bun.TrivialDeinit(@This()); + + pub fn pull(this: *This, buf: []u8) streams.Result { + return onPull(&this.context, buf, JSValue.zero); + } + + pub fn ref(this: *This) void { + if (setRefUnrefFn) |setRefUnref| { + setRefUnref(&this.context, true); + } + } + + pub fn unref(this: *This) void { + if (setRefUnrefFn) |setRefUnref| { + setRefUnref(&this.context, false); + } + } + + pub fn setRef(this: *This, value: bool) void { + if (setRefUnrefFn) |setRefUnref| { + setRefUnref(&this.context, value); + } + } + + pub fn start(this: *This) streams.Start { + return onStart(&this.context); + } + + pub fn onPullFromJS(this: *This, buf: []u8, view: JSValue) streams.Result { + return onPull(&this.context, buf, view); + } + + pub fn onStartFromJS(this: *This) streams.Start { + return onStart(&this.context); + } + + pub fn cancel(this: *This) void { + if (this.cancelled) { + return; + } + + this.cancelled = true; + onCancel(&this.context); + } + + pub fn onClose(this: *This) void { + if (this.cancelled) { + return; + } + + if (this.close_handler) |close| { + this.close_handler = null; + if (close == &JSReadableStreamSource.onClose) { + JSReadableStreamSource.onClose(this); + } else { + close(this.close_ctx); + } + } + } + + pub fn incrementCount(this: *This) void { + this.ref_count += 1; + } + + pub fn decrementCount(this: *This) u32 { + if (comptime Environment.isDebug) { + if (this.ref_count == 0) { + @panic("Attempted to decrement ref count below zero"); + } + } + + this.ref_count -= 1; + if (this.ref_count == 0) { + this.close_jsvalue.deinit(); + deinit_fn(&this.context); + return 0; + } + + return this.ref_count; + } + + pub fn getError(this: *This) ?Syscall.Error { + if (this.pending_err) |err| { + this.pending_err = null; + return err; + } + + return null; + } + + pub fn drain(this: *This) bun.ByteList { + if (drainInternalBuffer) |drain_fn| { + return drain_fn(&this.context); + } + + return .{}; + } + + pub fn toReadableStream(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject) JSC.JSValue { + const out_value = brk: { + if (this.this_jsvalue != .zero) { + break :brk this.this_jsvalue; + } + + break :brk this.toJS(globalThis); + }; + out_value.ensureStillAlive(); + this.this_jsvalue = out_value; + return ReadableStream.fromNative(globalThis, out_value); + } + + pub fn setRawModeFromJS(this: *ReadableStreamSourceType, global: *JSC.JSGlobalObject, call_frame: *JSC.CallFrame) bun.JSError!JSValue { + if (@hasDecl(Context, "setRawMode")) { + const flag = call_frame.argument(0); + if (Environment.allow_assert) { + bun.assert(flag.isBoolean()); + } + return switch (this.context.setRawMode(flag == .true)) { + .result => .undefined, + .err => |e| e.toJSC(global), + }; + } + + @compileError("setRawMode is not implemented on " ++ @typeName(Context)); + } + + const supports_ref = setRefUnrefFn != null; + + pub const js = @field(JSC.Codegen, "JS" ++ name_ ++ "InternalReadableStreamSource"); + pub const toJS = js.toJS; + pub const fromJS = js.fromJS; + pub const fromJSDirect = js.fromJSDirect; + + pub const drainFromJS = JSReadableStreamSource.drain; + pub const startFromJS = JSReadableStreamSource.start; + pub const pullFromJS = 
JSReadableStreamSource.pull; + pub const cancelFromJS = JSReadableStreamSource.cancel; + pub const updateRefFromJS = JSReadableStreamSource.updateRef; + pub const setOnCloseFromJS = JSReadableStreamSource.setOnCloseFromJS; + pub const getOnCloseFromJS = JSReadableStreamSource.getOnCloseFromJS; + pub const setOnDrainFromJS = JSReadableStreamSource.setOnDrainFromJS; + pub const getOnDrainFromJS = JSReadableStreamSource.getOnDrainFromJS; + pub const finalize = JSReadableStreamSource.finalize; + pub const construct = JSReadableStreamSource.construct; + pub const getIsClosedFromJS = JSReadableStreamSource.isClosed; + pub const textFromJS = JSReadableStreamSource.text; + pub const jsonFromJS = JSReadableStreamSource.json; + pub const arrayBufferFromJS = JSReadableStreamSource.arrayBuffer; + pub const blobFromJS = JSReadableStreamSource.blob; + pub const bytesFromJS = JSReadableStreamSource.bytes; + + pub fn memoryCost(this: *const ReadableStreamSourceType) usize { + if (memoryCostFn) |function| { + return function(&this.context) + @sizeOf(@This()); + } + return @sizeOf(@This()); + } + + pub const JSReadableStreamSource = struct { + pub fn pull(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + const this_jsvalue = callFrame.this(); + const arguments = callFrame.arguments_old(2); + const view = arguments.ptr[0]; + view.ensureStillAlive(); + this.this_jsvalue = this_jsvalue; + var buffer = view.asArrayBuffer(globalThis) orelse return .undefined; + return processResult( + this_jsvalue, + globalThis, + arguments.ptr[1], + this.onPullFromJS(buffer.slice(), view), + ); + } + + pub fn start(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + this.globalThis = globalThis; + this.this_jsvalue = callFrame.this(); + switch (this.onStartFromJS()) { + .empty => return JSValue.jsNumber(0), + .ready => return JSValue.jsNumber(16384), + .chunk_size => |size| return JSValue.jsNumber(size), + .err => |err| { + return globalThis.throwValue(err.toJSC(globalThis)); + }, + else => |rc| { + return rc.toJS(globalThis); + }, + } + } + + pub fn isClosed(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + _ = globalObject; // autofix + return JSC.JSValue.jsBoolean(this.is_closed); + } + + fn processResult(this_jsvalue: JSC.JSValue, globalThis: *JSGlobalObject, flags: JSValue, result: streams.Result) bun.JSError!JSC.JSValue { + switch (result) { + .err => |err| { + if (err == .Error) { + return globalThis.throwValue(err.Error.toJSC(globalThis)); + } else { + const js_err = err.JSValue; + js_err.ensureStillAlive(); + js_err.unprotect(); + return globalThis.throwValue(js_err); + } + }, + .pending => { + const out = result.toJS(globalThis); + js.pendingPromiseSetCached(this_jsvalue, globalThis, out); + return out; + }, + .temporary_and_done, .owned_and_done, .into_array_and_done => { + JSC.C.JSObjectSetPropertyAtIndex(globalThis, flags.asObjectRef(), 0, JSValue.jsBoolean(true).asObjectRef(), null); + return result.toJS(globalThis); + }, + else => return result.toJS(globalThis), + } + } + + pub fn cancel(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + _ = globalObject; // autofix + JSC.markBinding(@src()); + this.this_jsvalue = callFrame.this(); + this.cancel(); + return .undefined; + } + + pub fn setOnCloseFromJS(this: 
*ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) bool { + JSC.markBinding(@src()); + this.close_handler = JSReadableStreamSource.onClose; + this.globalThis = globalObject; + + if (value.isUndefined()) { + this.close_jsvalue.deinit(); + return true; + } + + if (!value.isCallable()) { + globalObject.throwInvalidArgumentType("ReadableStreamSource", "onclose", "function") catch {}; + return false; + } + const cb = value.withAsyncContextIfNeeded(globalObject); + this.close_jsvalue.set(globalObject, cb); + return true; + } + + pub fn setOnDrainFromJS(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) bool { + JSC.markBinding(@src()); + this.globalThis = globalObject; + + if (value.isUndefined()) { + js.onDrainCallbackSetCached(this.this_jsvalue, globalObject, .undefined); + return true; + } + + if (!value.isCallable()) { + globalObject.throwInvalidArgumentType("ReadableStreamSource", "onDrain", "function") catch {}; + return false; + } + const cb = value.withAsyncContextIfNeeded(globalObject); + js.onDrainCallbackSetCached(this.this_jsvalue, globalObject, cb); + return true; + } + + pub fn getOnCloseFromJS(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + _ = globalObject; // autofix + + JSC.markBinding(@src()); + + return this.close_jsvalue.get() orelse .undefined; + } + + pub fn getOnDrainFromJS(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + _ = globalObject; // autofix + + JSC.markBinding(@src()); + + if (js.onDrainCallbackGetCached(this.this_jsvalue)) |val| { + return val; + } + + return .undefined; + } + + pub fn updateRef(this: *ReadableStreamSourceType, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + _ = globalObject; // autofix + JSC.markBinding(@src()); + this.this_jsvalue = callFrame.this(); + const ref_or_unref = callFrame.argument(0).toBoolean(); + this.setRef(ref_or_unref); + + return .undefined; + } + + fn onClose(ptr: ?*anyopaque) void { + JSC.markBinding(@src()); + var this = bun.cast(*ReadableStreamSourceType, ptr.?); + if (this.close_jsvalue.trySwap()) |cb| { + this.globalThis.queueMicrotask(cb, &.{}); + } + + this.close_jsvalue.deinit(); + } + + pub fn finalize(this: *ReadableStreamSourceType) void { + this.this_jsvalue = .zero; + + _ = this.decrementCount(); + } + + pub fn drain(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + this.this_jsvalue = callFrame.this(); + var list = this.drain(); + if (list.len > 0) { + return JSC.ArrayBuffer.fromBytes(list.slice(), .Uint8Array).toJS(globalThis, null); + } + return JSValue.jsUndefined(); + } + + pub fn text(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + this.this_jsvalue = callFrame.this(); + + if (toBufferedValue) |to_buffered_value| { + return to_buffered_value(&this.context, globalThis, .text); + } + + globalThis.throwTODO("This is not implemented yet"); + return .zero; + } + + pub fn arrayBuffer(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + this.this_jsvalue = callFrame.this(); + + if (toBufferedValue) |to_buffered_value| { + return to_buffered_value(&this.context, globalThis, .arrayBuffer); + } + + globalThis.throwTODO("This is not implemented yet"); + return .zero; 
+ } + + pub fn blob(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + this.this_jsvalue = callFrame.this(); + + if (toBufferedValue) |to_buffered_value| { + return to_buffered_value(&this.context, globalThis, .blob); + } + + globalThis.throwTODO("This is not implemented yet"); + return .zero; + } + + pub fn bytes(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + this.this_jsvalue = callFrame.this(); + + if (toBufferedValue) |to_buffered_value| { + return to_buffered_value(&this.context, globalThis, .bytes); + } + + globalThis.throwTODO("This is not implemented yet"); + return .zero; + } + + pub fn json(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + this.this_jsvalue = callFrame.this(); + + if (toBufferedValue) |to_buffered_value| { + return to_buffered_value(&this.context, globalThis, .json); + } + + globalThis.throwTODO("This is not implemented yet"); + return .zero; + } + }; + }; +} + +const bun = @import("bun"); +const Environment = bun.Environment; +const JSC = bun.jsc; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const Syscall = bun.sys; +const webcore = bun.webcore; +const streams = webcore.streams; +const Blob = webcore.Blob; diff --git a/src/bun.js/webcore/Request.zig b/src/bun.js/webcore/Request.zig new file mode 100644 index 0000000000..f042290c92 --- /dev/null +++ b/src/bun.js/webcore/Request.zig @@ -0,0 +1,979 @@ +//! https://developer.mozilla.org/en-US/docs/Web/API/Request +const Request = @This(); + +url: bun.String = bun.String.empty, +// NOTE(@cirospaciari): renamed to _headers to avoid direct manipulation, use getFetchHeaders, setFetchHeaders, ensureFetchHeaders and hasFetchHeaders instead +_headers: ?*FetchHeaders = null, +signal: ?*AbortSignal = null, +body: *Body.Value.HiveRef, +method: Method = Method.GET, +request_context: JSC.API.AnyRequestContext = JSC.API.AnyRequestContext.Null, +https: bool = false, +weak_ptr_data: WeakRef.Data = .empty, +// We must report a consistent value for this +reported_estimated_size: usize = 0, +internal_event_callback: InternalJSEventCallback = .{}, + +pub const js = JSC.Codegen.JSRequest; +// NOTE: toJS is overridden +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; + +pub const new = bun.TrivialNew(@This()); + +const RequestMixin = BodyMixin(@This()); +pub const getText = RequestMixin.getText; +pub const getBytes = RequestMixin.getBytes; +pub const getBody = RequestMixin.getBody; +pub const getBodyUsed = RequestMixin.getBodyUsed; +pub const getJSON = RequestMixin.getJSON; +pub const getArrayBuffer = RequestMixin.getArrayBuffer; +pub const getBlob = RequestMixin.getBlob; +pub const getFormData = RequestMixin.getFormData; +pub const getBlobWithoutCallFrame = RequestMixin.getBlobWithoutCallFrame; +pub const WeakRef = bun.ptr.WeakPtr(Request, "weak_ptr_data"); + +pub fn memoryCost(this: *const Request) usize { + return @sizeOf(Request) + this.request_context.memoryCost() + this.url.byteSlice().len + this.body.value.memoryCost(); +} + +pub export fn Request__setCookiesOnRequestContext(this: *Request, cookieMap: ?*JSC.WebCore.CookieMap) void { + this.request_context.setCookies(cookieMap); +} + +pub export fn Request__getUWSRequest( + this: *Request, +) ?*uws.Request { + return 
this.request_context.getRequest(); +} + +pub export fn Request__setInternalEventCallback( + this: *Request, + callback: JSC.JSValue, + globalThis: *JSC.JSGlobalObject, +) void { + this.internal_event_callback = InternalJSEventCallback.init(callback, globalThis); + // we always have the abort event but we need to enable the timeout event as well in case of `node:http`.Server.setTimeout is set + this.request_context.enableTimeoutEvents(); +} + +pub export fn Request__setTimeout(this: *Request, seconds: JSC.JSValue, globalThis: *JSC.JSGlobalObject) void { + if (!seconds.isNumber()) { + globalThis.throw("Failed to set timeout: The provided value is not of type 'number'.", .{}) catch {}; + return; + } + + this.setTimeout(seconds.to(c_uint)); +} + +comptime { + _ = Request__getUWSRequest; + _ = Request__setInternalEventCallback; + _ = Request__setTimeout; +} + +pub const InternalJSEventCallback = struct { + function: JSC.Strong = .empty, + + pub const EventType = JSC.API.NodeHTTPResponse.AbortEvent; + + pub fn init(function: JSC.JSValue, globalThis: *JSC.JSGlobalObject) InternalJSEventCallback { + return InternalJSEventCallback{ + .function = JSC.Strong.create(function, globalThis), + }; + } + + pub fn hasCallback(this: *InternalJSEventCallback) bool { + return this.function.has(); + } + + pub fn trigger(this: *InternalJSEventCallback, eventType: EventType, globalThis: *JSC.JSGlobalObject) bool { + if (this.function.get()) |callback| { + _ = callback.call(globalThis, JSC.JSValue.jsUndefined(), &.{JSC.JSValue.jsNumber( + @intFromEnum(eventType), + )}) catch |err| globalThis.reportActiveExceptionAsUnhandled(err); + return true; + } + return false; + } + + pub fn deinit(this: *InternalJSEventCallback) void { + this.function.deinit(); + } +}; + +pub fn init( + url: bun.String, + headers: ?*FetchHeaders, + body: *Body.Value.HiveRef, + method: Method, +) Request { + return Request{ + .url = url, + ._headers = headers, + .body = body, + .method = method, + }; +} + +pub fn getContentType( + this: *Request, +) ?ZigString.Slice { + if (this.request_context.getRequest()) |req| { + if (req.header("content-type")) |value| { + return ZigString.Slice.fromUTF8NeverFree(value); + } + } + + if (this._headers) |headers| { + if (headers.fastGet(.ContentType)) |value| { + return value.toSlice(bun.default_allocator); + } + } + + if (this.body.value == .Blob) { + if (this.body.value.Blob.content_type.len > 0) + return ZigString.Slice.fromUTF8NeverFree(this.body.value.Blob.content_type); + } + + return null; +} + +pub fn getFormDataEncoding(this: *Request) ?*bun.FormData.AsyncFormData { + var content_type_slice: ZigString.Slice = this.getContentType() orelse return null; + defer content_type_slice.deinit(); + const encoding = bun.FormData.Encoding.get(content_type_slice.slice()) orelse return null; + return bun.FormData.AsyncFormData.init(bun.default_allocator, encoding) catch unreachable; +} + +pub fn estimatedSize(this: *Request) callconv(.C) usize { + return this.reported_estimated_size; +} + +pub fn getRemoteSocketInfo(this: *Request, globalObject: *JSC.JSGlobalObject) ?JSC.JSValue { + if (this.request_context.getRemoteSocketInfo()) |info| { + return JSC.JSSocketAddress.create(globalObject, info.ip, info.port, info.is_ipv6); + } + + return null; +} + +pub fn calculateEstimatedByteSize(this: *Request) void { + this.reported_estimated_size = this.body.value.estimatedSize() + this.sizeOfURL() + @sizeOf(Request); +} + +pub export fn Bun__JSRequest__calculateEstimatedByteSize(this: *Request) void { + 
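+ // Exported with the C calling convention; presumably invoked from the native bindings to refresh the cached size.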
this.calculateEstimatedByteSize(); +} + +pub fn toJS(this: *Request, globalObject: *JSGlobalObject) JSValue { + this.calculateEstimatedByteSize(); + return js.toJSUnchecked(globalObject, this); +} + +extern "JS" fn Bun__getParamsIfBunRequest(this_value: JSValue) JSValue; + +pub fn writeFormat(this: *Request, this_value: JSValue, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + const Writer = @TypeOf(writer); + + const params_object = Bun__getParamsIfBunRequest(this_value); + + const class_label = switch (params_object) { + .zero => "Request", + else => "BunRequest", + }; + try writer.print("{s} ({}) {{\n", .{ class_label, bun.fmt.size(this.body.value.size(), .{}) }); + { + formatter.indent += 1; + defer formatter.indent -|= 1; + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("method: \"", enable_ansi_colors)); + + try writer.writeAll(bun.asByteSlice(@tagName(this.method))); + try writer.writeAll("\""); + formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable; + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("url: ", enable_ansi_colors)); + try this.ensureURL(); + try writer.print(comptime Output.prettyFmt("\"{}\"", enable_ansi_colors), .{this.url}); + formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable; + try writer.writeAll("\n"); + + if (params_object.isCell()) { + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("params: ", enable_ansi_colors)); + try formatter.printAs(.Private, Writer, writer, params_object, .Object, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable; + try writer.writeAll("\n"); + } + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("headers: ", enable_ansi_colors)); + try formatter.printAs(.Private, Writer, writer, this.getHeaders(formatter.globalThis), .DOMWrapper, enable_ansi_colors); + + if (this.body.value == .Blob) { + try writer.writeAll("\n"); + try formatter.writeIndent(Writer, writer); + try this.body.value.Blob.writeFormat(Formatter, formatter, writer, enable_ansi_colors); + } else if (this.body.value == .InternalBlob or this.body.value == .WTFStringImpl) { + try writer.writeAll("\n"); + try formatter.writeIndent(Writer, writer); + const size = this.body.value.size(); + if (size == 0) { + var empty = Blob.initEmpty(undefined); + try empty.writeFormat(Formatter, formatter, writer, enable_ansi_colors); + } else { + try Blob.writeFormatForSize(false, size, writer, enable_ansi_colors); + } + } else if (this.body.value == .Locked) { + if (this.body.value.Locked.readable.get(this.body.value.Locked.global)) |stream| { + try writer.writeAll("\n"); + try formatter.writeIndent(Writer, writer); + try formatter.printAs(.Object, Writer, writer, stream.value, stream.value.jsType(), enable_ansi_colors); + } + } + } + try writer.writeAll("\n"); + try formatter.writeIndent(Writer, writer); + try writer.writeAll("}"); +} + +pub fn mimeType(this: *const Request) string { + if (this._headers) |headers| { + if (headers.fastGet(.ContentType)) |content_type| { + return content_type.slice(); + } + } + + switch (this.body.value) { + .Blob => |blob| { + if (blob.content_type.len > 0) { + return blob.content_type; + } + + return MimeType.other.value; + }, + .InternalBlob => return this.body.value.InternalBlob.contentType(), + 
.WTFStringImpl => return MimeType.text.value, + // .InlineBlob => return this.body.value.InlineBlob.contentType(), + .Null, .Error, .Used, .Locked, .Empty => return MimeType.other.value, + } +} + +pub fn getCache( + _: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return ZigString.init("default").toJS(globalThis); +} +pub fn getCredentials( + _: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return ZigString.init("include").toJS(globalThis); +} +pub fn getDestination( + _: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return ZigString.init("").toJS(globalThis); +} + +pub fn getIntegrity( + _: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return ZigString.Empty.toJS(globalThis); +} + +pub fn getSignal(this: *Request, globalThis: *JSC.JSGlobalObject) JSC.JSValue { + // Already have a C++ instance + if (this.signal) |signal| { + return signal.toJS(globalThis); + } else { + // Lazily create a default signal + const js_signal = AbortSignal.create(globalThis); + js_signal.ensureStillAlive(); + if (AbortSignal.fromJS(js_signal)) |signal| { + this.signal = signal.ref(); + } + return js_signal; + } +} + +pub fn getMethod( + this: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return this.method.toJS(globalThis); +} + +pub fn getMode( + _: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return ZigString.init("navigate").toJS(globalThis); +} + +pub fn finalizeWithoutDeinit(this: *Request) void { + if (this._headers) |headers| { + headers.deref(); + this._headers = null; + } + + this.url.deref(); + this.url = bun.String.empty; + + if (this.signal) |signal| { + signal.unref(); + this.signal = null; + } + this.internal_event_callback.deinit(); +} + +pub fn finalize(this: *Request) void { + this.finalizeWithoutDeinit(); + _ = this.body.unref(); + if (this.weak_ptr_data.onFinalize()) { + bun.destroy(this); + } +} + +pub fn getRedirect( + _: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return ZigString.init("follow").toJS(globalThis); +} +pub fn getReferrer( + this: *Request, + globalObject: *JSC.JSGlobalObject, +) JSC.JSValue { + if (this._headers) |headers_ref| { + if (headers_ref.get("referrer", globalObject)) |referrer| { + return ZigString.init(referrer).toJS(globalObject); + } + } + + return ZigString.init("").toJS(globalObject); +} +pub fn getReferrerPolicy( + _: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return ZigString.init("").toJS(globalThis); +} +pub fn getUrl(this: *Request, globalObject: *JSC.JSGlobalObject) JSC.JSValue { + this.ensureURL() catch { + globalObject.throw("Failed to join URL", .{}) catch {}; // TODO: propagate + return .zero; + }; + + return this.url.toJS(globalObject); +} + +pub fn sizeOfURL(this: *const Request) usize { + if (this.url.length() > 0) + return this.url.byteSlice().len; + + if (this.request_context.getRequest()) |req| { + const req_url = req.url(); + if (req_url.len > 0 and req_url[0] == '/') { + if (req.header("host")) |host| { + const fmt = bun.fmt.HostFormatter{ + .is_https = this.https, + .host = host, + }; + return this.getProtocol().len + req_url.len + std.fmt.count("{any}", .{fmt}); + } + } + return req_url.len; + } + + return 0; +} + +pub fn getProtocol(this: *const Request) []const u8 { + if (this.https) + return "https://"; + + return "http://"; +} + +pub fn ensureURL(this: *Request) !void { + if (!this.url.isEmpty()) return; + + if (this.request_context.getRequest()) |req| { + const req_url = req.url(); + if
(req_url.len > 0 and req_url[0] == '/') { + if (req.header("host")) |host| { + const fmt = bun.fmt.HostFormatter{ + .is_https = this.https, + .host = host, + }; + const url_bytelength = std.fmt.count("{s}{any}{s}", .{ + this.getProtocol(), + fmt, + req_url, + }); + + if (comptime Environment.allow_assert) { + bun.assert(this.sizeOfURL() == url_bytelength); + } + + if (url_bytelength < 128) { + var buffer: [128]u8 = undefined; + const url = std.fmt.bufPrint(&buffer, "{s}{any}{s}", .{ + this.getProtocol(), + fmt, + req_url, + }) catch @panic("Unexpected error while printing URL"); + + if (comptime Environment.allow_assert) { + bun.assert(this.sizeOfURL() == url.len); + } + + var href = bun.JSC.URL.hrefFromString(bun.String.fromBytes(url)); + if (!href.isEmpty()) { + if (href.byteSlice().ptr == url.ptr) { + this.url = bun.String.createLatin1(url[0..href.length()]); + href.deref(); + } else { + this.url = href; + } + } else { + // TODO: what is the right thing to do for invalid URLS? + this.url = bun.String.createUTF8(url); + } + + return; + } + + if (strings.isAllASCII(host) and strings.isAllASCII(req_url)) { + this.url, const bytes = bun.String.createUninitialized(.latin1, url_bytelength); + _ = std.fmt.bufPrint(bytes, "{s}{any}{s}", .{ + this.getProtocol(), + fmt, + req_url, + }) catch |err| switch (err) { + error.NoSpaceLeft => unreachable, // exact space should have been counted + }; + } else { + // slow path + const temp_url = std.fmt.allocPrint(bun.default_allocator, "{s}{any}{s}", .{ + this.getProtocol(), + fmt, + req_url, + }) catch bun.outOfMemory(); + defer bun.default_allocator.free(temp_url); + this.url = bun.String.createUTF8(temp_url); + } + + const href = bun.JSC.URL.hrefFromString(this.url); + // TODO: what is the right thing to do for invalid URLS? 
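+ // An empty href means the joined URL failed to parse; in that case keep the raw string instead of replacing it.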
+ if (!href.isEmpty()) { + this.url.deref(); + this.url = href; + } + + return; + } + } + + if (comptime Environment.allow_assert) { + bun.assert(this.sizeOfURL() == req_url.len); + } + this.url = bun.String.createUTF8(req_url); + } +} + +const Fields = enum { + method, + headers, + body, + // referrer, + // referrerPolicy, + // mode, + // credentials, + // redirect, + // integrity, + // keepalive, + signal, + // proxy, + // timeout, + url, +}; + +pub fn constructInto(globalThis: *JSC.JSGlobalObject, arguments: []const JSC.JSValue) bun.JSError!Request { + var success = false; + const vm = globalThis.bunVM(); + const body = try vm.initRequestBodyValue(.{ .Null = {} }); + var req = Request{ + .body = body, + }; + defer { + if (!success) { + req.finalizeWithoutDeinit(); + _ = req.body.unref(); + } + if (req.body != body) { + _ = body.unref(); + } + } + + if (arguments.len == 0) { + return globalThis.throw("Failed to construct 'Request': 1 argument required, but only 0 present.", .{}); + } else if (arguments[0].isEmptyOrUndefinedOrNull() or !arguments[0].isCell()) { + return globalThis.throw("Failed to construct 'Request': expected non-empty string or object, got undefined", .{}); + } + + const url_or_object = arguments[0]; + const url_or_object_type = url_or_object.jsType(); + var fields = std.EnumSet(Fields).initEmpty(); + + const is_first_argument_a_url = + // fastest path: + url_or_object_type.isStringLike() or + // slower path: + url_or_object.as(JSC.DOMURL) != null; + + if (is_first_argument_a_url) { + const str = try bun.String.fromJS(arguments[0], globalThis); + req.url = str; + + if (!req.url.isEmpty()) + fields.insert(.url); + } else if (!url_or_object_type.isObject()) { + return globalThis.throw("Failed to construct 'Request': expected non-empty string or object", .{}); + } + + const values_to_try_ = [_]JSValue{ + if (arguments.len > 1 and arguments[1].isObject()) + arguments[1] + else if (is_first_argument_a_url) + JSValue.undefined + else + url_or_object, + if (is_first_argument_a_url) JSValue.undefined else url_or_object, + }; + const values_to_try = values_to_try_[0 .. 
@as(usize, @intFromBool(!is_first_argument_a_url)) + + @as(usize, @intFromBool(arguments.len > 1 and arguments[1].isObject()))]; + for (values_to_try) |value| { + const value_type = value.jsType(); + const explicit_check = values_to_try.len == 2 and value_type == .FinalObject and values_to_try[1].jsType() == .DOMWrapper; + if (value_type == .DOMWrapper) { + if (value.asDirect(Request)) |request| { + if (values_to_try.len == 1) { + request.cloneInto(&req, globalThis.allocator(), globalThis, fields.contains(.url)); + success = true; + return req; + } + + if (!fields.contains(.method)) { + req.method = request.method; + fields.insert(.method); + } + + if (!fields.contains(.headers)) { + if (request.cloneHeaders(globalThis)) |headers| { + req._headers = headers; + fields.insert(.headers); + } + + if (globalThis.hasException()) return error.JSError; + } + + if (!fields.contains(.body)) { + switch (request.body.value) { + .Null, .Empty, .Used => {}, + else => { + req.body.value = request.body.value.clone(globalThis); + if (globalThis.hasException()) return error.JSError; + fields.insert(.body); + }, + } + } + } + + if (value.asDirect(Response)) |response| { + if (!fields.contains(.method)) { + req.method = response.init.method; + fields.insert(.method); + } + + if (!fields.contains(.headers)) { + if (response.init.headers) |headers| { + req._headers = headers.cloneThis(globalThis); + fields.insert(.headers); + } + } + + if (!fields.contains(.url)) { + if (!response.url.isEmpty()) { + req.url = response.url.dupeRef(); + fields.insert(.url); + } + } + + if (!fields.contains(.body)) { + switch (response.body.value) { + .Null, .Empty, .Used => {}, + else => { + req.body.value = response.body.value.clone(globalThis); + fields.insert(.body); + }, + } + } + + if (globalThis.hasException()) return error.JSError; + } + } + + if (!fields.contains(.body)) { + if (value.fastGet(globalThis, .body)) |body_| { + fields.insert(.body); + req.body.value = try Body.Value.fromJS(globalThis, body_); + } + + if (globalThis.hasException()) return error.JSError; + } + + if (!fields.contains(.url)) { + if (value.fastGet(globalThis, .url)) |url| { + req.url = try bun.String.fromJS(url, globalThis); + if (!req.url.isEmpty()) + fields.insert(.url); + + // first value + } else if (@intFromEnum(value) == @intFromEnum(values_to_try[values_to_try.len - 1]) and !is_first_argument_a_url and + value.implementsToString(globalThis)) + { + const str = try bun.String.fromJS(value, globalThis); + req.url = str; + if (!req.url.isEmpty()) + fields.insert(.url); + } + + if (globalThis.hasException()) return error.JSError; + } + + if (!fields.contains(.signal)) { + if (try value.getTruthy(globalThis, "signal")) |signal_| { + fields.insert(.signal); + if (AbortSignal.fromJS(signal_)) |signal| { + //Keep it alive + signal_.ensureStillAlive(); + req.signal = signal.ref(); + } else { + if (!globalThis.hasException()) { + return globalThis.throw("Failed to construct 'Request': signal is not of type AbortSignal.", .{}); + } + return error.JSError; + } + } + + if (globalThis.hasException()) return error.JSError; + } + + if (!fields.contains(.method) or !fields.contains(.headers)) { + if (globalThis.hasException()) return error.JSError; + if (try Response.Init.init(globalThis, value)) |response_init| { + if (!explicit_check or (explicit_check and value.fastGet(globalThis, .headers) != null)) { + if (response_init.headers) |headers| { + if (!fields.contains(.headers)) { + req._headers = headers; + fields.insert(.headers); + } else { + 
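+ // Headers were already taken from an earlier argument; drop the clone produced by Response.Init.init so it does not leak.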
headers.deref(); + } + } + } + + if (globalThis.hasException()) return error.JSError; + + if (!explicit_check or (explicit_check and value.fastGet(globalThis, .method) != null)) { + if (!fields.contains(.method)) { + req.method = response_init.method; + fields.insert(.method); + } + } + if (globalThis.hasException()) return error.JSError; + } + + if (globalThis.hasException()) return error.JSError; + } + } + + if (globalThis.hasException()) { + return error.JSError; + } + + if (req.url.isEmpty()) { + return globalThis.throw("Failed to construct 'Request': url is required.", .{}); + } + + const href = JSC.URL.hrefFromString(req.url); + if (href.isEmpty()) { + if (!globalThis.hasException()) { + // globalThis.throw can cause GC, which could cause the above string to be freed. + // so we must increment the reference count before calling it. + return globalThis.ERR(.INVALID_URL, "Failed to construct 'Request': Invalid URL \"{}\"", .{req.url}).throw(); + } + return error.JSError; + } + + // hrefFromString increments the reference count if they end up being + // the same + // + // we increment the reference count on usage above, so we must + // decrement it to be perfectly balanced. + req.url.deref(); + + req.url = href; + + if (req.body.value == .Blob and + req._headers != null and + req.body.value.Blob.content_type.len > 0 and + !req._headers.?.fastHas(.ContentType)) + { + req._headers.?.put(.ContentType, req.body.value.Blob.content_type, globalThis); + } + + req.calculateEstimatedByteSize(); + success = true; + + return req; +} + +pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*Request { + const arguments_ = callframe.arguments_old(2); + const arguments = arguments_.ptr[0..arguments_.len]; + + const request = try constructInto(globalThis, arguments); + return Request.new(request); +} + +pub fn getBodyValue( + this: *Request, +) *Body.Value { + return &this.body.value; +} + +pub fn doClone( + this: *Request, + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSC.JSValue { + const this_value = callframe.this(); + var cloned = this.clone(bun.default_allocator, globalThis); + + if (globalThis.hasException()) { + cloned.finalize(); + return .zero; + } + + const js_wrapper = cloned.toJS(globalThis); + if (js_wrapper != .zero) { + if (cloned.body.value == .Locked) { + if (cloned.body.value.Locked.readable.get(globalThis)) |readable| { + // If we are teed, then we need to update the cached .body + // value to point to the new readable stream + // We must do this on both the original and cloned request + // but especially the original request since it will have a stale .body value now. + js.bodySetCached(js_wrapper, globalThis, readable.value); + if (this.body.value.Locked.readable.get(globalThis)) |other_readable| { + js.bodySetCached(this_value, globalThis, other_readable.value); + } + } + } + } + + return js_wrapper; +} + +// Returns if the request has headers already cached/set. +pub fn hasFetchHeaders(this: *Request) bool { + return this._headers != null; +} + +/// Sets the headers of the request. This will take ownership of the headers. +/// it will deref the previous headers if they exist. +pub fn setFetchHeaders( + this: *Request, + headers: ?*FetchHeaders, +) void { + if (this._headers) |old_headers| { + old_headers.deref(); + } + + this._headers = headers; +} + +/// Returns the headers of the request. If the headers are not already cached, it will create a new FetchHeaders object. 
+/// If the headers are empty, it will look at request_context to get the headers. +/// If the headers are empty and request_context is null, it will create an empty FetchHeaders object. +pub fn ensureFetchHeaders( + this: *Request, + globalThis: *JSC.JSGlobalObject, +) *FetchHeaders { + if (this._headers) |headers| { + // headers is already set + return headers; + } + + if (this.request_context.getRequest()) |req| { + // we have a request context, so we can get the headers from it + this._headers = FetchHeaders.createFromUWS(req); + } else { + // we don't have a request context, so we need to create an empty headers object + this._headers = FetchHeaders.createEmpty(); + + if (this.body.value == .Blob) { + const content_type = this.body.value.Blob.content_type; + if (content_type.len > 0) { + this._headers.?.put(.ContentType, content_type, globalThis); + } + } + } + + return this._headers.?; +} + +pub fn getFetchHeadersUnlessEmpty( + this: *Request, +) ?*FetchHeaders { + if (this._headers == null) { + if (this.request_context.getRequest()) |req| { + // we have a request context, so we can get the headers from it + this._headers = FetchHeaders.createFromUWS(req); + } + } + + const headers = this._headers orelse return null; + if (headers.isEmpty()) { + return null; + } + return headers; +} + +/// Returns the headers of the request. This will not look at the request context to get the headers. +pub fn getFetchHeaders( + this: *Request, +) ?*FetchHeaders { + return this._headers; +} + +/// This should only be called by the JS code. Use getFetchHeaders to get the current headers or ensureFetchHeaders to get the headers and create them if they don't exist. +pub fn getHeaders( + this: *Request, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return this.ensureFetchHeaders(globalThis).toJS(globalThis); +} + +pub fn cloneHeaders(this: *Request, globalThis: *JSGlobalObject) ?*FetchHeaders { + if (this._headers == null) { + if (this.request_context.getRequest()) |uws_req| { + this._headers = FetchHeaders.createFromUWS(uws_req); + } + } + + if (this._headers) |head| { + if (head.isEmpty()) { + return null; + } + + return head.cloneThis(globalThis); + } + + return null; +} + +pub fn cloneInto( + this: *Request, + req: *Request, + allocator: std.mem.Allocator, + globalThis: *JSGlobalObject, + preserve_url: bool, +) void { + _ = allocator; + this.ensureURL() catch {}; + const vm = globalThis.bunVM(); + const body = vm.initRequestBodyValue(this.body.value.clone(globalThis)) catch { + if (!globalThis.hasException()) { + globalThis.throw("Failed to clone request", .{}) catch {}; + } + return; + }; + const original_url = req.url; + + req.* = Request{ + .body = body, + .url = if (preserve_url) original_url else this.url.dupeRef(), + .method = this.method, + ._headers = this.cloneHeaders(globalThis), + }; + + if (this.signal) |signal| { + req.signal = signal.ref(); + } +} + +pub fn clone(this: *Request, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) *Request { + const req = Request.new(undefined); + this.cloneInto(req, allocator, globalThis, false); + return req; +} + +pub fn setTimeout( + this: *Request, + seconds: c_uint, +) void { + _ = this.request_context.setTimeout(seconds); +} + +const std = @import("std"); +const Api = @import("../../api/schema.zig").Api; +const bun = @import("bun"); +const MimeType = bun.http.MimeType; +const ZigURL = @import("../../url.zig").URL; +const HTTPClient = bun.http; +const JSC = bun.JSC; + +const Method = @import("../../http/method.zig").Method; +const
FetchHeaders = bun.webcore.FetchHeaders; +const AbortSignal = JSC.WebCore.AbortSignal; +const ObjectPool = @import("../../pool.zig").ObjectPool; +const SystemError = JSC.SystemError; +const Output = bun.Output; +const MutableString = bun.MutableString; +const strings = bun.strings; +const string = bun.string; +const default_allocator = bun.default_allocator; +const FeatureFlags = bun.FeatureFlags; + +const Environment = @import("../../env.zig"); +const ZigString = JSC.ZigString; +const IdentityContext = @import("../../identity_context.zig").IdentityContext; +const JSPromise = JSC.JSPromise; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const NullableAllocator = bun.NullableAllocator; + +const VirtualMachine = JSC.VirtualMachine; +const Task = JSC.Task; +const JSPrinter = bun.js_printer; +const picohttp = bun.picohttp; +const StringJoiner = bun.StringJoiner; +const uws = bun.uws; + +const InlineBlob = JSC.WebCore.Blob.Inline; +const AnyBlob = JSC.WebCore.Blob.Any; +const InternalBlob = JSC.WebCore.Blob.Internal; +const BodyMixin = JSC.WebCore.Body.Mixin; +const Body = JSC.WebCore.Body; +const Blob = JSC.WebCore.Blob; +const Response = JSC.WebCore.Response; diff --git a/src/bun.js/webcore/Response.zig b/src/bun.js/webcore/Response.zig new file mode 100644 index 0000000000..ec967b3ff6 --- /dev/null +++ b/src/bun.js/webcore/Response.zig @@ -0,0 +1,775 @@ +const Response = @This(); + +const ResponseMixin = BodyMixin(@This()); +pub const js = JSC.Codegen.JSResponse; +// NOTE: toJS is overridden +pub const fromJS = js.fromJS; +pub const fromJSDirect = js.fromJSDirect; + +body: Body, +init: Init, +url: bun.String = bun.String.empty, +redirected: bool = false, +/// We increment this count in fetch so if the JS Response is discarded we can resolve the Body +/// In the server we use a flag response_protected to protect/unprotect the response +ref_count: u32 = 1, + +// We must report a consistent value for this +reported_estimated_size: usize = 0, + +pub const getText = ResponseMixin.getText; +pub const getBody = ResponseMixin.getBody; +pub const getBytes = ResponseMixin.getBytes; +pub const getBodyUsed = ResponseMixin.getBodyUsed; +pub const getJSON = ResponseMixin.getJSON; +pub const getArrayBuffer = ResponseMixin.getArrayBuffer; +pub const getBlob = ResponseMixin.getBlob; +pub const getBlobWithoutCallFrame = ResponseMixin.getBlobWithoutCallFrame; +pub const getFormData = ResponseMixin.getFormData; + +pub fn getFormDataEncoding(this: *Response) ?*bun.FormData.AsyncFormData { + var content_type_slice: ZigString.Slice = this.getContentType() orelse return null; + defer content_type_slice.deinit(); + const encoding = bun.FormData.Encoding.get(content_type_slice.slice()) orelse return null; + return bun.FormData.AsyncFormData.init(bun.default_allocator, encoding) catch bun.outOfMemory(); +} + +pub fn estimatedSize(this: *Response) callconv(.C) usize { + return this.reported_estimated_size; +} + +pub fn calculateEstimatedByteSize(this: *Response) void { + this.reported_estimated_size = this.body.value.estimatedSize() + + this.url.byteSlice().len + + this.init.status_text.byteSlice().len + + @sizeOf(Response); +} + +pub fn toJS(this: *Response, globalObject: *JSGlobalObject) JSValue { + this.calculateEstimatedByteSize(); + return js.toJSUnchecked(globalObject, this); +} + +pub fn getBodyValue( + this: *Response, +) *Body.Value { + return &this.body.value; +} + +pub export fn jsFunctionRequestOrResponseHasBodyValue(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame)
callconv(JSC.conv) JSC.JSValue { + _ = globalObject; // autofix + const arguments = callframe.arguments_old(1); + const this_value = arguments.ptr[0]; + if (this_value.isEmptyOrUndefinedOrNull()) { + return .false; + } + + if (this_value.as(Response)) |response| { + return JSC.JSValue.jsBoolean(!response.body.value.isDefinitelyEmpty()); + } else if (this_value.as(Request)) |request| { + return JSC.JSValue.jsBoolean(!request.body.value.isDefinitelyEmpty()); + } + + return .false; +} + +pub export fn jsFunctionGetCompleteRequestOrResponseBodyValueAsArrayBuffer(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { + const arguments = callframe.arguments_old(1); + const this_value = arguments.ptr[0]; + if (this_value.isEmptyOrUndefinedOrNull()) { + return .undefined; + } + + const body: *Body.Value = brk: { + if (this_value.as(Response)) |response| { + break :brk &response.body.value; + } else if (this_value.as(Request)) |request| { + break :brk &request.body.value; + } + + return .undefined; + }; + + // Get the body if it's available synchronously. + switch (body.*) { + .Used, .Empty, .Null => return .undefined, + .Blob => |*blob| { + if (blob.isBunFile()) { + return .undefined; + } + defer body.* = .{ .Used = {} }; + return blob.toArrayBuffer(globalObject, .transfer) catch return .zero; + }, + .WTFStringImpl, .InternalBlob => { + var any_blob = body.useAsAnyBlob(); + return any_blob.toArrayBufferTransfer(globalObject) catch return .zero; + }, + .Error, .Locked => return .undefined, + } +} + +pub fn getFetchHeaders( + this: *Response, +) ?*FetchHeaders { + return this.init.headers; +} + +pub inline fn statusCode(this: *const Response) u16 { + return this.init.status_code; +} + +pub fn redirectLocation(this: *const Response) ?[]const u8 { + return this.header(.Location); +} + +pub fn header(this: *const Response, name: bun.webcore.FetchHeaders.HTTPHeaderName) ?[]const u8 { + return if ((this.init.headers orelse return null).fastGet(name)) |str| + str.slice() + else + null; +} + +pub const Props = struct {}; + +pub fn writeFormat(this: *Response, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { + const Writer = @TypeOf(writer); + try writer.print("Response ({}) {{\n", .{bun.fmt.size(this.body.len(), .{})}); + + { + formatter.indent += 1; + defer formatter.indent -|= 1; + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("ok: ", enable_ansi_colors)); + try formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.isOK()), .BooleanObject, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("url: \"", enable_ansi_colors)); + try writer.print(comptime Output.prettyFmt("{}", enable_ansi_colors), .{this.url}); + try writer.writeAll("\""); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("status: ", enable_ansi_colors)); + try formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(this.init.status_code), .NumberObject, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try 
writer.writeAll(comptime Output.prettyFmt("statusText: ", enable_ansi_colors)); + try writer.print(comptime Output.prettyFmt("\"{}\"", enable_ansi_colors), .{this.init.status_text}); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("headers: ", enable_ansi_colors)); + try formatter.printAs(.Private, Writer, writer, this.getHeaders(formatter.globalThis), .DOMWrapper, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + try formatter.writeIndent(Writer, writer); + try writer.writeAll(comptime Output.prettyFmt("redirected: ", enable_ansi_colors)); + try formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.redirected), .BooleanObject, enable_ansi_colors); + formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); + try writer.writeAll("\n"); + + formatter.resetLine(); + try this.body.writeFormat(Formatter, formatter, writer, enable_ansi_colors); + } + try writer.writeAll("\n"); + try formatter.writeIndent(Writer, writer); + try writer.writeAll("}"); + formatter.resetLine(); +} + +pub fn isOK(this: *const Response) bool { + return this.init.status_code >= 200 and this.init.status_code <= 299; +} + +pub fn getURL( + this: *Response, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + // https://developer.mozilla.org/en-US/docs/Web/API/Response/url + return this.url.toJS(globalThis); +} + +pub fn getResponseType( + this: *Response, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + if (this.init.status_code < 200) { + return bun.String.static("error").toJS(globalThis); + } + + return bun.String.static("default").toJS(globalThis); +} + +pub fn getStatusText( + this: *Response, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + // https://developer.mozilla.org/en-US/docs/Web/API/Response/statusText + return this.init.status_text.toJS(globalThis); +} + +pub fn getRedirected( + this: *Response, + _: *JSC.JSGlobalObject, +) JSC.JSValue { + // https://developer.mozilla.org/en-US/docs/Web/API/Response/redirected + return JSValue.jsBoolean(this.redirected); +} + +pub fn getOK( + this: *Response, + _: *JSC.JSGlobalObject, +) JSC.JSValue { + // https://developer.mozilla.org/en-US/docs/Web/API/Response/ok + return JSValue.jsBoolean(this.isOK()); +} + +fn getOrCreateHeaders(this: *Response, globalThis: *JSC.JSGlobalObject) *FetchHeaders { + if (this.init.headers == null) { + this.init.headers = FetchHeaders.createEmpty(); + + if (this.body.value == .Blob) { + const content_type = this.body.value.Blob.content_type; + if (content_type.len > 0) { + this.init.headers.?.put(.ContentType, content_type, globalThis); + } + } + } + + return this.init.headers.?; +} + +pub fn getHeaders( + this: *Response, + globalThis: *JSC.JSGlobalObject, +) JSC.JSValue { + return this.getOrCreateHeaders(globalThis).toJS(globalThis); +} + +pub fn doClone( + this: *Response, + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSValue { + const this_value = callframe.this(); + const cloned = this.clone(globalThis); + if (globalThis.hasException()) { + cloned.finalize(); + return .zero; + } + + const js_wrapper = Response.makeMaybePooled(globalThis, cloned); + + if (js_wrapper != .zero) { + if (cloned.body.value == .Locked) { + if (cloned.body.value.Locked.readable.get(globalThis)) |readable| { + // If we are teed, then 
we need to update the cached .body + // value to point to the new readable stream + // We must do this on both the original and cloned response + // but especially the original response since it will have a stale .body value now. + js.bodySetCached(js_wrapper, globalThis, readable.value); + if (this.body.value.Locked.readable.get(globalThis)) |other_readable| { + js.bodySetCached(this_value, globalThis, other_readable.value); + } + } + } + } + + return js_wrapper; +} + +pub fn makeMaybePooled(globalObject: *JSC.JSGlobalObject, ptr: *Response) JSValue { + return ptr.toJS(globalObject); +} + +pub fn cloneValue( + this: *Response, + globalThis: *JSGlobalObject, +) Response { + return Response{ + .body = this.body.clone(globalThis), + .init = this.init.clone(globalThis), + .url = this.url.clone(), + .redirected = this.redirected, + }; +} + +pub fn clone(this: *Response, globalThis: *JSGlobalObject) *Response { + return bun.new(Response, this.cloneValue(globalThis)); +} + +pub fn getStatus( + this: *Response, + _: *JSC.JSGlobalObject, +) JSC.JSValue { + // https://developer.mozilla.org/en-US/docs/Web/API/Response/status + return JSValue.jsNumber(this.init.status_code); +} + +fn destroy(this: *Response) void { + this.init.deinit(bun.default_allocator); + this.body.deinit(bun.default_allocator); + this.url.deref(); + + bun.destroy(this); +} + +pub fn ref(this: *Response) *Response { + this.ref_count += 1; + return this; +} + +pub fn unref(this: *Response) void { + bun.assert(this.ref_count > 0); + this.ref_count -= 1; + if (this.ref_count == 0) { + this.destroy(); + } +} + +pub fn finalize( + this: *Response, +) callconv(.C) void { + this.unref(); +} + +pub fn getContentType( + this: *Response, +) ?ZigString.Slice { + if (this.init.headers) |headers| { + if (headers.fastGet(.ContentType)) |value| { + return value.toSlice(bun.default_allocator); + } + } + + if (this.body.value == .Blob) { + if (this.body.value.Blob.content_type.len > 0) + return ZigString.Slice.fromUTF8NeverFree(this.body.value.Blob.content_type); + } + + return null; +} + +pub fn constructJSON( + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSValue { + const args_list = callframe.arguments_old(2); + // https://github.com/remix-run/remix/blob/db2c31f64affb2095e4286b91306b96435967969/packages/remix-server-runtime/responses.ts#L4 + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), args_list.ptr[0..args_list.len]); + + var response = Response{ + .body = Body{ + .value = .{ .Empty = {} }, + }, + .init = Response.Init{ + .status_code = 200, + }, + .url = bun.String.empty, + }; + var did_succeed = false; + defer { + if (!did_succeed) { + response.body.deinit(bun.default_allocator); + response.init.deinit(bun.default_allocator); + } + } + const json_value = args.nextEat() orelse JSC.JSValue.zero; + + if (@intFromEnum(json_value) != 0) { + var str = bun.String.empty; + // calling JSON.stringify on an empty string adds extra quotes + // so this is correct + json_value.jsonStringify(globalThis, 0, &str); + + if (globalThis.hasException()) { + return .zero; + } + + if (!str.isEmpty()) { + if (str.value.WTFStringImpl.toUTF8IfNeeded(bun.default_allocator)) |bytes| { + defer str.deref(); + response.body.value = .{ + .InternalBlob = InternalBlob{ + .bytes = std.ArrayList(u8).fromOwnedSlice(bun.default_allocator, @constCast(bytes.slice())), + .was_string = true, + }, + }; + } else { + response.body.value = Body.Value{ + .WTFStringImpl = str.value.WTFStringImpl, + }; + } + } + } + + if 
(args.nextEat()) |init| { + if (init.isUndefinedOrNull()) {} else if (init.isNumber()) { + response.init.status_code = @as(u16, @intCast(@min(@max(0, init.toInt32()), std.math.maxInt(u16)))); + } else { + if (Response.Init.init(globalThis, init) catch |err| if (err == error.JSError) return .zero else null) |_init| { + response.init = _init; + } + } + } + + var headers_ref = response.getOrCreateHeaders(globalThis); + headers_ref.putDefault(.ContentType, MimeType.json.value, globalThis); + did_succeed = true; + return bun.new(Response, response).toJS(globalThis); +} +pub fn constructRedirect( + globalThis: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, +) bun.JSError!JSValue { + var args_list = callframe.arguments_old(4); + // https://github.com/remix-run/remix/blob/db2c31f64affb2095e4286b91306b96435967969/packages/remix-server-runtime/responses.ts#L4 + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), args_list.ptr[0..args_list.len]); + + var url_string_slice = ZigString.Slice.empty; + defer url_string_slice.deinit(); + var response: Response = brk: { + var response = Response{ + .init = Response.Init{ + .status_code = 302, + }, + .body = Body{ + .value = .{ .Empty = {} }, + }, + .url = bun.String.empty, + }; + + const url_string_value = args.nextEat() orelse JSC.JSValue.zero; + var url_string = ZigString.init(""); + + if (@intFromEnum(url_string_value) != 0) { + url_string = try url_string_value.getZigString(globalThis); + } + url_string_slice = url_string.toSlice(bun.default_allocator); + var did_succeed = false; + defer { + if (!did_succeed) { + response.body.deinit(bun.default_allocator); + response.init.deinit(bun.default_allocator); + } + } + + if (args.nextEat()) |init| { + if (init.isUndefinedOrNull()) {} else if (init.isNumber()) { + response.init.status_code = @as(u16, @intCast(@min(@max(0, init.toInt32()), std.math.maxInt(u16)))); + } else { + if (Response.Init.init(globalThis, init) catch |err| + if (err == error.JSError) return .zero else null) |_init| + { + response.init = _init; + response.init.status_code = 302; + } + } + } + if (globalThis.hasException()) { + return .zero; + } + did_succeed = true; + break :brk response; + }; + + response.init.headers = response.getOrCreateHeaders(globalThis); + var headers_ref = response.init.headers.?; + headers_ref.put(.Location, url_string_slice.slice(), globalThis); + const ptr = bun.new(Response, response); + + return ptr.toJS(globalThis); +} +pub fn constructError( + globalThis: *JSC.JSGlobalObject, + _: *JSC.CallFrame, +) bun.JSError!JSValue { + const response = bun.new( + Response, + Response{ + .init = Response.Init{ + .status_code = 0, + }, + .body = Body{ + .value = .{ .Empty = {} }, + }, + }, + ); + + return response.toJS(globalThis); +} + +pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*Response { + const arguments = callframe.argumentsAsArray(2); + + if (!arguments[0].isUndefinedOrNull() and arguments[0].isObject()) { + if (arguments[0].as(Blob)) |blob| { + if (blob.isS3()) { + if (!arguments[1].isEmptyOrUndefinedOrNull()) { + return globalThis.throwInvalidArguments("new Response(s3File) do not support ResponseInit options", .{}); + } + var response: Response = .{ + .init = Response.Init{ + .status_code = 302, + }, + .body = Body{ + .value = .{ .Empty = {} }, + }, + .url = bun.String.empty, + }; + + const credentials = blob.store.?.data.s3.getCredentials(); + + const result = credentials.signRequest(.{ + .path = blob.store.?.data.s3.path(), + .method = .GET, + }, 
false, .{ .expires = 15 * 60 }) catch |sign_err| { + return s3.throwSignError(sign_err, globalThis); + }; + defer result.deinit(); + response.init.headers = response.getOrCreateHeaders(globalThis); + response.redirected = true; + var headers_ref = response.init.headers.?; + headers_ref.put(.Location, result.url, globalThis); + return bun.new(Response, response); + } + } + } + var init: Init = (brk: { + if (arguments[1].isUndefinedOrNull()) { + break :brk Init{ + .status_code = 200, + .headers = null, + }; + } + if (arguments[1].isObject()) { + break :brk try Init.init(globalThis, arguments[1]) orelse unreachable; + } + if (!globalThis.hasException()) { + return globalThis.throwInvalidArguments("Failed to construct 'Response': The provided body value is not of type 'ResponseInit'", .{}); + } + return error.JSError; + }); + errdefer init.deinit(bun.default_allocator); + + if (globalThis.hasException()) { + return error.JSError; + } + + var body: Body = brk: { + if (arguments[0].isUndefinedOrNull()) { + break :brk Body{ + .value = Body.Value{ .Null = {} }, + }; + } + break :brk try Body.extract(globalThis, arguments[0]); + }; + errdefer body.deinit(bun.default_allocator); + + if (globalThis.hasException()) { + return error.JSError; + } + + var response = bun.new(Response, Response{ + .body = body, + .init = init, + }); + + if (response.body.value == .Blob and + response.init.headers != null and + response.body.value.Blob.content_type.len > 0 and + !response.init.headers.?.fastHas(.ContentType)) + { + response.init.headers.?.put(.ContentType, response.body.value.Blob.content_type, globalThis); + } + + response.calculateEstimatedByteSize(); + + return response; +} + +pub const Init = struct { + headers: ?*FetchHeaders = null, + status_code: u16, + status_text: bun.String = bun.String.empty, + method: Method = Method.GET, + + pub fn clone(this: Init, ctx: *JSGlobalObject) Init { + var that = this; + const headers = this.headers; + if (headers) |head| { + that.headers = head.cloneThis(ctx); + } + that.status_text = this.status_text.clone(); + + return that; + } + + pub fn init(globalThis: *JSGlobalObject, response_init: JSC.JSValue) bun.JSError!?Init { + var result = Init{ .status_code = 200 }; + errdefer { + result.deinit(bun.default_allocator); + } + + if (!response_init.isCell()) + return null; + + if (response_init.jsType() == .DOMWrapper) { + // fast path: it's a Request object or a Response object + // we can skip calling JS getters + if (response_init.asDirect(Request)) |req| { + if (req.getFetchHeadersUnlessEmpty()) |headers| { + result.headers = headers.cloneThis(globalThis); + } + + result.method = req.method; + return result; + } + + if (response_init.asDirect(Response)) |resp| { + return resp.init.clone(globalThis); + } + } + + if (globalThis.hasException()) { + return error.JSError; + } + + if (response_init.fastGet(globalThis, .headers)) |headers| { + if (headers.as(FetchHeaders)) |orig| { + if (!orig.isEmpty()) { + result.headers = orig.cloneThis(globalThis); + } + } else { + result.headers = FetchHeaders.createFromJS(globalThis, headers); + } + } + + if (globalThis.hasException()) { + return error.JSError; + } + + if (response_init.fastGet(globalThis, .status)) |status_value| { + const number = status_value.coerceToInt64(globalThis); + if ((200 <= number and number < 600) or number == 101) { + result.status_code = @as(u16, @truncate(@as(u32, @intCast(number)))); + } else { + if (!globalThis.hasException()) { + const err = globalThis.createRangeErrorInstance("The status provided 
({d}) must be 101 or in the range of [200, 599]", .{number}); + return globalThis.throwValue(err); + } + return error.JSError; + } + } + + if (globalThis.hasException()) { + return error.JSError; + } + + if (response_init.fastGet(globalThis, .statusText)) |status_text| { + result.status_text = try bun.String.fromJS(status_text, globalThis); + } + + if (response_init.fastGet(globalThis, .method)) |method_value| { + if (try Method.fromJS(globalThis, method_value)) |method| { + result.method = method; + } + } + + return result; + } + + pub fn deinit(this: *Init, _: std.mem.Allocator) void { + if (this.headers) |headers| { + this.headers = null; + + headers.deref(); + } + + this.status_text.deref(); + this.status_text = bun.String.empty; + } +}; + +pub fn @"404"(globalThis: *JSC.JSGlobalObject) Response { + return emptyWithStatus(globalThis, 404); +} + +pub fn @"200"(globalThis: *JSC.JSGlobalObject) Response { + return emptyWithStatus(globalThis, 200); +} + +inline fn emptyWithStatus(_: *JSC.JSGlobalObject, status: u16) Response { + return bun.new(Response, .{ + .body = Body{ + .value = Body.Value{ .Null = {} }, + }, + .init = Init{ + .status_code = status, + }, + }); +} + +/// https://developer.mozilla.org/en-US/docs/Web/API/Headers +// TODO: move to http.zig. this has nothing to do with JSC or WebCore + +const std = @import("std"); +const Api = @import("../../api/schema.zig").Api; +const bun = @import("bun"); +const MimeType = bun.http.MimeType; +const ZigURL = @import("../../url.zig").URL; +const http = bun.http; +const FetchRedirect = http.FetchRedirect; +const JSC = bun.JSC; + +const Method = @import("../../http/method.zig").Method; +const FetchHeaders = bun.webcore.FetchHeaders; +const ObjectPool = @import("../../pool.zig").ObjectPool; +const SystemError = JSC.SystemError; +const Output = bun.Output; +const MutableString = bun.MutableString; +const strings = bun.strings; +const string = bun.string; +const default_allocator = bun.default_allocator; +const FeatureFlags = bun.FeatureFlags; + +const Environment = @import("../../env.zig"); +const ZigString = JSC.ZigString; +const IdentityContext = @import("../../identity_context.zig").IdentityContext; +const JSPromise = JSC.JSPromise; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const NullableAllocator = bun.NullableAllocator; +const DataURL = @import("../../resolver/data_url.zig").DataURL; + +const SSLConfig = @import("../api/server.zig").ServerConfig.SSLConfig; + +const VirtualMachine = JSC.VirtualMachine; +const Task = JSC.Task; +const JSPrinter = bun.js_printer; +const picohttp = bun.picohttp; +const StringJoiner = bun.StringJoiner; +const uws = bun.uws; +const Mutex = bun.Mutex; + +const InlineBlob = JSC.WebCore.Blob.Inline; +const AnyBlob = JSC.WebCore.Blob.Any; +const InternalBlob = JSC.WebCore.Blob.Internal; +const BodyMixin = JSC.WebCore.Body.Mixin; +const Body = JSC.WebCore.Body; +const Request = JSC.WebCore.Request; +const Blob = JSC.WebCore.Blob; +const Async = bun.Async; + +const BoringSSL = bun.BoringSSL.c; +const X509 = @import("../api/bun/x509.zig"); +const PosixToWinNormalizer = bun.path.PosixToWinNormalizer; +const s3 = bun.S3; diff --git a/src/bun.js/webcore/S3Client.zig b/src/bun.js/webcore/S3Client.zig index 6b9935ca9b..b759844146 100644 --- a/src/bun.js/webcore/S3Client.zig +++ b/src/bun.js/webcore/S3Client.zig @@ -101,7 +101,7 @@ pub const S3Client = struct { pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*@This() { const arguments = 
callframe.arguments_old(1).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); var aws_options = try S3Credentials.getCredentialsWithOptions(globalThis.bunVM().transpiler.env.getS3Credentials(), .{}, args.nextEat(), null, null, globalThis); defer aws_options.deinit(); @@ -135,7 +135,7 @@ pub const S3Client = struct { } pub fn file(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { if (args.len() == 0) { @@ -152,7 +152,7 @@ pub const S3Client = struct { pub fn presign(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { if (args.len() == 0) { @@ -170,7 +170,7 @@ pub const S3Client = struct { pub fn exists(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { if (args.len() == 0) { @@ -187,7 +187,7 @@ pub const S3Client = struct { pub fn size(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { if (args.len() == 0) { @@ -204,7 +204,7 @@ pub const S3Client = struct { pub fn stat(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { if (args.len() == 0) { @@ -221,7 +221,7 @@ pub const S3Client = struct { pub fn write(ptr: *@This(), globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { return globalThis.ERR(.MISSING_ARGS, "Expected a path to write to", .{}).throw(); @@ -255,7 +255,7 @@ pub const S3Client = struct { pub fn unlink(ptr: *@This(), globalThis: 
*JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const path: JSC.Node.PathLike = try JSC.Node.PathLike.fromJS(globalThis, &args) orelse { return globalThis.ERR(.MISSING_ARGS, "Expected a path to unlink", .{}).throw(); @@ -302,7 +302,7 @@ pub const S3Client = struct { pub fn staticFile(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const path = (try JSC.Node.PathLike.fromJS(globalThis, &args)) orelse { diff --git a/src/bun.js/webcore/S3File.zig b/src/bun.js/webcore/S3File.zig index 99ac3145f7..d46c462d8a 100644 --- a/src/bun.js/webcore/S3File.zig +++ b/src/bun.js/webcore/S3File.zig @@ -11,7 +11,7 @@ const Output = bun.Output; const S3Client = @import("./S3Client.zig"); const S3 = bun.S3; const S3Stat = @import("./S3Stat.zig").S3Stat; -pub fn writeFormat(s3: *Blob.S3Store, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool, content_type: []const u8, offset: usize) !void { +pub fn writeFormat(s3: *Blob.Store.S3, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool, content_type: []const u8, offset: usize) !void { try writer.writeAll(comptime Output.prettyFmt("S3Ref", enable_ansi_colors)); const credentials = s3.getCredentials(); // detect virtual host style bucket name @@ -75,7 +75,7 @@ pub fn writeFormat(s3: *Blob.S3Store, comptime Formatter: type, formatter: *Form } pub fn presign(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); // accept a path or a blob @@ -106,7 +106,7 @@ pub fn presign(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.J pub fn unlink(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); // accept a path or a blob @@ -138,7 +138,7 @@ pub fn unlink(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JS pub fn write(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); // accept a path or a blob @@ -181,7 +181,7 @@ pub fn write(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSE pub fn size(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); 
defer args.deinit(); // accept a path or a blob @@ -214,7 +214,7 @@ pub fn size(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSEr } pub fn exists(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); // accept a path or a blob @@ -560,7 +560,7 @@ pub fn getStat(this: *Blob, globalThis: *JSC.JSGlobalObject, _: *JSC.CallFrame) pub fn stat(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); // accept a path or a blob @@ -614,7 +614,7 @@ pub fn constructInternal( ) bun.JSError!*Blob { const vm = globalObject.bunVM(); const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(vm, arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(vm, arguments); defer args.deinit(); const path = (try JSC.Node.PathLike.fromJS(globalObject, &args)) orelse { @@ -650,8 +650,8 @@ comptime { } pub const exports = struct { - pub const JSS3File__presign = JSC.toJSHostFunctionWithContext(Blob, getPresignUrl); - pub const JSS3File__stat = JSC.toJSHostFunctionWithContext(Blob, getStat); + pub const JSS3File__presign = JSC.toJSHostFnWithContext(Blob, getPresignUrl); + pub const JSS3File__stat = JSC.toJSHostFnWithContext(Blob, getStat); }; extern fn BUN__createJSS3File(*JSC.JSGlobalObject, *JSC.CallFrame) callconv(JSC.conv) JSValue; extern fn BUN__createJSS3FileUnsafely(*JSC.JSGlobalObject, *Blob) callconv(JSC.conv) JSValue; diff --git a/src/bun.js/webcore/Sink.zig b/src/bun.js/webcore/Sink.zig new file mode 100644 index 0000000000..122ce0d47f --- /dev/null +++ b/src/bun.js/webcore/Sink.zig @@ -0,0 +1,657 @@ +const Sink = @This(); + +ptr: *anyopaque, +vtable: VTable, +status: Status = Status.closed, +used: bool = false, + +pub const ArrayBufferSink = @import("ArrayBufferSink.zig"); + +pub const pending = Sink{ + .ptr = @as(*anyopaque, @ptrFromInt(0xaaaaaaaa)), + .vtable = undefined, +}; + +pub const Status = enum { + ready, + closed, +}; + +pub const Data = union(enum) { + utf16: streams.Result, + latin1: streams.Result, + bytes: streams.Result, +}; + +pub fn initWithType(comptime Type: type, handler: *Type) Sink { + return .{ + .ptr = handler, + .vtable = VTable.wrap(Type), + .status = .ready, + .used = false, + }; +} + +pub fn init(handler: anytype) Sink { + return initWithType(std.meta.Child(@TypeOf(handler)), handler); +} + +pub const UTF8Fallback = struct { + const stack_size = 1024; + pub fn writeLatin1(comptime Ctx: type, ctx: *Ctx, input: streams.Result, comptime writeFn: anytype) streams.Result.Writable { + const str = input.slice(); + if (bun.strings.isAllASCII(str)) { + return writeFn( + ctx, + input, + ); + } + + if (stack_size >= str.len) { + var buf: [stack_size]u8 = undefined; + @memcpy(buf[0..str.len], str); + + bun.strings.replaceLatin1WithUTF8(buf[0..str.len]); + if (input.isDone()) { + const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..str.len]) }); + return result; + } else { + const result = writeFn(ctx, .{ .temporary = bun.ByteList.init(buf[0..str.len]) }); + return result; + } + } + + { + var slice = 
bun.default_allocator.alloc(u8, str.len) catch return .{ .err = Syscall.Error.oom }; + @memcpy(slice[0..str.len], str); + + bun.strings.replaceLatin1WithUTF8(slice[0..str.len]); + if (input.isDone()) { + return writeFn(ctx, .{ .owned_and_done = bun.ByteList.init(slice) }); + } else { + return writeFn(ctx, .{ .owned = bun.ByteList.init(slice) }); + } + } + } + + pub fn writeUTF16(comptime Ctx: type, ctx: *Ctx, input: streams.Result, comptime writeFn: anytype) streams.Result.Writable { + const str: []const u16 = std.mem.bytesAsSlice(u16, input.slice()); + + if (stack_size >= str.len * 2) { + var buf: [stack_size]u8 = undefined; + const copied = bun.strings.copyUTF16IntoUTF8(&buf, []const u16, str, true); + bun.assert(copied.written <= stack_size); + bun.assert(copied.read <= stack_size); + if (input.isDone()) { + const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..copied.written]) }); + return result; + } else { + const result = writeFn(ctx, .{ .temporary = bun.ByteList.init(buf[0..copied.written]) }); + return result; + } + } + + { + const allocated = bun.strings.toUTF8Alloc(bun.default_allocator, str) catch return .{ .err = Syscall.Error.oom }; + if (input.isDone()) { + return writeFn(ctx, .{ .owned_and_done = bun.ByteList.init(allocated) }); + } else { + return writeFn(ctx, .{ .owned = bun.ByteList.init(allocated) }); + } + } + } +}; + +pub const VTable = struct { + pub const WriteUTF16Fn = *const (fn (this: *anyopaque, data: streams.Result) streams.Result.Writable); + pub const WriteUTF8Fn = *const (fn (this: *anyopaque, data: streams.Result) streams.Result.Writable); + pub const WriteLatin1Fn = *const (fn (this: *anyopaque, data: streams.Result) streams.Result.Writable); + pub const EndFn = *const (fn (this: *anyopaque, err: ?Syscall.Error) JSC.Maybe(void)); + pub const ConnectFn = *const (fn (this: *anyopaque, signal: streams.Signal) JSC.Maybe(void)); + + connect: ConnectFn, + write: WriteUTF8Fn, + writeLatin1: WriteLatin1Fn, + writeUTF16: WriteUTF16Fn, + end: EndFn, + + pub fn wrap( + comptime Wrapped: type, + ) VTable { + const Functions = struct { + pub fn onWrite(this: *anyopaque, data: streams.Result) streams.Result.Writable { + return Wrapped.write(@as(*Wrapped, @ptrCast(@alignCast(this))), data); + } + pub fn onConnect(this: *anyopaque, signal: streams.Signal) JSC.Maybe(void) { + return Wrapped.connect(@as(*Wrapped, @ptrCast(@alignCast(this))), signal); + } + pub fn onWriteLatin1(this: *anyopaque, data: streams.Result) streams.Result.Writable { + return Wrapped.writeLatin1(@as(*Wrapped, @ptrCast(@alignCast(this))), data); + } + pub fn onWriteUTF16(this: *anyopaque, data: streams.Result) streams.Result.Writable { + return Wrapped.writeUTF16(@as(*Wrapped, @ptrCast(@alignCast(this))), data); + } + pub fn onEnd(this: *anyopaque, err: ?Syscall.Error) JSC.Maybe(void) { + return Wrapped.end(@as(*Wrapped, @ptrCast(@alignCast(this))), err); + } + }; + + return VTable{ + .write = Functions.onWrite, + .writeLatin1 = Functions.onWriteLatin1, + .writeUTF16 = Functions.onWriteUTF16, + .end = Functions.onEnd, + .connect = Functions.onConnect, + }; + } +}; + +pub fn end(this: *Sink, err: ?Syscall.Error) JSC.Maybe(void) { + if (this.status == .closed) { + return .{ .result = {} }; + } + + this.status = .closed; + return this.vtable.end(this.ptr, err); +} + +pub fn writeLatin1(this: *Sink, data: streams.Result) streams.Result.Writable { + if (this.status == .closed) { + return .{ .done = {} }; + } + + const res = this.vtable.writeLatin1(this.ptr, data); + this.status = 
if ((res.isDone()) or this.status == .closed) + Status.closed + else + Status.ready; + this.used = true; + return res; +} + +pub fn writeBytes(this: *Sink, data: streams.Result) streams.Result.Writable { + if (this.status == .closed) { + return .{ .done = {} }; + } + + const res = this.vtable.write(this.ptr, data); + this.status = if ((res.isDone()) or this.status == .closed) + Status.closed + else + Status.ready; + this.used = true; + return res; +} + +pub fn writeUTF16(this: *Sink, data: streams.Result) streams.Result.Writable { + if (this.status == .closed) { + return .{ .done = {} }; + } + + const res = this.vtable.writeUTF16(this.ptr, data); + this.status = if ((res.isDone()) or this.status == .closed) + Status.closed + else + Status.ready; + this.used = true; + return res; +} + +pub fn write(this: *Sink, data: Data) streams.Result.Writable { + switch (data) { + .utf16 => |str| { + return this.writeUTF16(str); + }, + .latin1 => |str| { + return this.writeLatin1(str); + }, + .bytes => |bytes| { + return this.writeBytes(bytes); + }, + } +} + +pub fn JSSink(comptime SinkType: type, comptime abi_name: []const u8) type { + return struct { + sink: SinkType, + + const ThisSink = @This(); + + // This attaches it to JS + pub const SinkSignal = extern struct { + cpp: JSValue, + + pub fn init(cpp: JSValue) streams.Signal { + // this one can be null + @setRuntimeSafety(false); + return streams.Signal.initWithType(SinkSignal, @as(*SinkSignal, @ptrFromInt(@as(usize, @bitCast(@intFromEnum(cpp)))))); + } + + pub fn close(this: *@This(), _: ?Syscall.Error) void { + onClose(@as(SinkSignal, @bitCast(@intFromPtr(this))).cpp, JSValue.jsUndefined()); + } + + pub fn ready(this: *@This(), _: ?Blob.SizeType, _: ?Blob.SizeType) void { + onReady(@as(SinkSignal, @bitCast(@intFromPtr(this))).cpp, JSValue.jsUndefined(), JSValue.jsUndefined()); + } + + pub fn start(_: *@This()) void {} + }; + + pub fn memoryCost(this: *ThisSink) callconv(.C) usize { + return @sizeOf(ThisSink) + SinkType.memoryCost(&this.sink); + } + + const AssignToStreamFn = *const fn (*JSGlobalObject, JSValue, *anyopaque, **anyopaque) callconv(.C) JSValue; + const OnCloseFn = *const fn (JSValue, JSValue) callconv(.C) void; + const OnReadyFn = *const fn (JSValue, JSValue, JSValue) callconv(.C) void; + const OnStartFn = *const fn (JSValue, *JSGlobalObject) callconv(.C) void; + const CreateObjectFn = *const fn (*JSGlobalObject, *anyopaque, usize) callconv(.C) JSValue; + const SetDestroyCallbackFn = *const fn (JSValue, usize) callconv(.C) void; + const DetachPtrFn = *const fn (JSValue) callconv(.C) void; + + const assignToStreamExtern = @extern(AssignToStreamFn, .{ .name = abi_name ++ "__assignToStream" }); + const onCloseExtern = @extern(OnCloseFn, .{ .name = abi_name ++ "__onClose" }); + const onReadyExtern = @extern(OnReadyFn, .{ .name = abi_name ++ "__onReady" }); + const onStartExtern = @extern(OnStartFn, .{ .name = abi_name ++ "__onStart" }); + const createObjectExtern = @extern(CreateObjectFn, .{ .name = abi_name ++ "__createObject" }); + const setDestroyCallbackExtern = @extern(SetDestroyCallbackFn, .{ .name = abi_name ++ "__setDestroyCallback" }); + const detachPtrExtern = @extern(DetachPtrFn, .{ .name = abi_name ++ "__detachPtr" }); + + pub fn assignToStream(globalThis: *JSGlobalObject, stream: JSValue, ptr: *anyopaque, jsvalue_ptr: **anyopaque) JSValue { + return assignToStreamExtern(globalThis, stream, ptr, jsvalue_ptr); + } + + pub fn onClose(ptr: JSValue, reason: JSValue) void { + JSC.markBinding(@src()); + return onCloseExtern(ptr, 
reason); + } + + pub fn onReady(ptr: JSValue, amount: JSValue, offset: JSValue) void { + JSC.markBinding(@src()); + return onReadyExtern(ptr, amount, offset); + } + + pub fn onStart(ptr: JSValue, globalThis: *JSGlobalObject) void { + JSC.markBinding(@src()); + return onStartExtern(ptr, globalThis); + } + + pub fn createObject(globalThis: *JSGlobalObject, object: *anyopaque, destructor: usize) JSValue { + JSC.markBinding(@src()); + return createObjectExtern(globalThis, object, destructor); + } + + pub fn setDestroyCallback(value: JSValue, callback: usize) void { + JSC.markBinding(@src()); + return setDestroyCallbackExtern(value, callback); + } + + pub fn detachPtr(ptr: JSValue) void { + return detachPtrExtern(ptr); + } + + pub fn construct(globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + + if (comptime !@hasDecl(SinkType, "construct")) { + const Static = struct { + pub const message = std.fmt.comptimePrint("{s} is not constructable", .{SinkType.name}); + }; + const err = JSC.SystemError{ + .message = bun.String.static(Static.message), + .code = bun.String.static(@tagName(.ERR_ILLEGAL_CONSTRUCTOR)), + }; + return globalThis.throwValue(err.toErrorInstance(globalThis)); + } + + var allocator = globalThis.bunVM().allocator; + var this = allocator.create(ThisSink) catch { + return globalThis.throwValue(Syscall.Error.oom.toJSC(globalThis)); + }; + this.sink.construct(allocator); + return createObject(globalThis, this, 0); + } + + pub fn finalize(ptr: *anyopaque) callconv(.C) void { + var this = @as(*ThisSink, @ptrCast(@alignCast(ptr))); + + this.sink.finalize(); + } + + pub fn detach(this: *ThisSink) void { + if (comptime !@hasField(SinkType, "signal")) + return; + + const ptr = this.sink.signal.ptr; + if (this.sink.signal.isDead()) + return; + this.sink.signal.clear(); + const value = @as(JSValue, @enumFromInt(@as(JSC.JSValue.backing_int, @bitCast(@intFromPtr(ptr))))); + value.unprotect(); + detachPtr(value); + } + + // The code generator encodes two distinct failure types using 0 and 1 + const FromJSResult = enum(usize) { + /// The sink has been closed and the wrapped type is freed. + detached = 0, + /// JS exception has not yet been thrown + cast_failed = 1, + /// *ThisSink + _, + }; + const fromJSExtern = @extern( + *const fn (value: JSValue) callconv(.C) FromJSResult, + .{ .name = abi_name ++ "__fromJS" }, + ); + + pub fn fromJS(value: JSValue) ?*ThisSink { + switch (fromJSExtern(value)) { + .detached, .cast_failed => return null, + else => |ptr| return @ptrFromInt(@intFromEnum(ptr)), + } + } + + fn getThis(global: *JSGlobalObject, callframe: *const JSC.CallFrame) bun.JSError!*ThisSink { + return switch (fromJSExtern(callframe.this())) { + .detached => global.throw("This " ++ abi_name ++ " has already been closed. 
A \"direct\" ReadableStream terminates its underlying socket once `async pull()` returns.", .{}), + .cast_failed => global.ERR(.INVALID_THIS, "Expected " ++ abi_name, .{}).throw(), + else => |ptr| @ptrFromInt(@intFromEnum(ptr)), + }; + } + + pub fn unprotect(this: *@This()) void { + _ = this; + } + + pub fn write(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + const this = try getThis(globalThis, callframe); + + if (comptime @hasDecl(SinkType, "getPendingError")) { + if (this.sink.getPendingError()) |err| { + return globalThis.throwValue(err); + } + } + + const args_list = callframe.arguments_old(4); + const args = args_list.ptr[0..args_list.len]; + + if (args.len == 0) { + return globalThis.throwValue(globalThis.toTypeError(.MISSING_ARGS, "write() expects a string, ArrayBufferView, or ArrayBuffer", .{})); + } + + const arg = args[0]; + arg.ensureStillAlive(); + defer arg.ensureStillAlive(); + + if (arg.isEmptyOrUndefinedOrNull()) { + return globalThis.throwValue(globalThis.toTypeError(.STREAM_NULL_VALUES, "write() expects a string, ArrayBufferView, or ArrayBuffer", .{})); + } + + if (arg.asArrayBuffer(globalThis)) |buffer| { + const slice = buffer.slice(); + if (slice.len == 0) { + return JSC.JSValue.jsNumber(0); + } + + return this.sink.writeBytes(.{ .temporary = bun.ByteList.init(slice) }).toJS(globalThis); + } + + if (!arg.isString()) { + return globalThis.throwValue(globalThis.toTypeError(.INVALID_ARG_TYPE, "write() expects a string, ArrayBufferView, or ArrayBuffer", .{})); + } + + const str = arg.toString(globalThis); + if (globalThis.hasException()) { + return .zero; + } + + const view = str.view(globalThis); + + if (view.isEmpty()) { + return JSC.JSValue.jsNumber(0); + } + + defer str.ensureStillAlive(); + if (view.is16Bit()) { + return this.sink.writeUTF16(.{ .temporary = bun.ByteList.initConst(std.mem.sliceAsBytes(view.utf16SliceAligned())) }).toJS(globalThis); + } + + return this.sink.writeLatin1(.{ .temporary = bun.ByteList.initConst(view.slice()) }).toJS(globalThis); + } + + pub fn writeUTF8(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + + const this = try getThis(globalThis, callframe); + + if (comptime @hasDecl(SinkType, "getPendingError")) { + if (this.sink.getPendingError()) |err| { + return globalThis.throwValue(err); + } + } + + const args_list = callframe.arguments_old(4); + const args = args_list.ptr[0..args_list.len]; + if (args.len == 0 or !args[0].isString()) { + const err = globalThis.toTypeError( + if (args.len == 0) .MISSING_ARGS else .INVALID_ARG_TYPE, + "writeUTF8() expects a string", + .{}, + ); + return globalThis.throwValue(err); + } + + const arg = args[0]; + + const str = arg.toString(globalThis); + if (globalThis.hasException()) { + return .zero; + } + + const view = str.view(globalThis); + if (view.isEmpty()) { + return JSC.JSValue.jsNumber(0); + } + + defer str.ensureStillAlive(); + if (str.is16Bit()) { + return this.sink.writeUTF16(.{ .temporary = view.utf16SliceAligned() }).toJS(globalThis); + } + + return this.sink.writeLatin1(.{ .temporary = view.slice() }).toJS(globalThis); + } + + pub fn close(globalThis: *JSGlobalObject, sink_ptr: ?*anyopaque) callconv(.C) JSValue { + JSC.markBinding(@src()); + const this: *ThisSink = @ptrCast(@alignCast(sink_ptr orelse return .undefined)); + + if (comptime @hasDecl(SinkType, "getPendingError")) { + if (this.sink.getPendingError()) |err| { + return globalThis.vm().throwError(globalThis, 
err) catch .zero; + } + } + + return this.sink.end(null).toJS(globalThis); + } + + pub fn flush(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + + const this = try getThis(globalThis, callframe); + + if (comptime @hasDecl(SinkType, "getPendingError")) { + if (this.sink.getPendingError()) |err| { + return globalThis.throwValue(err); + } + } + + defer { + if ((comptime @hasField(SinkType, "done")) and this.sink.done) { + this.unprotect(); + } + } + + if (comptime @hasDecl(SinkType, "flushFromJS")) { + const wait = callframe.argumentsCount() > 0 and callframe.argument(0).isBoolean() and callframe.argument(0).asBoolean(); + const maybe_value: JSC.Maybe(JSValue) = this.sink.flushFromJS(globalThis, wait); + return switch (maybe_value) { + .result => |value| value, + .err => |err| return globalThis.throwValue(err.toJSC(globalThis)), + }; + } + + return this.sink.flush().toJS(globalThis); + } + + pub fn start(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + + const this = try getThis(globalThis, callframe); + + if (comptime @hasDecl(SinkType, "getPendingError")) { + if (this.sink.getPendingError()) |err| { + return globalThis.throwValue(err); + } + } + + if (comptime @hasField(streams.Start, abi_name)) { + return this.sink.start( + if (callframe.argumentsCount() > 0) + try streams.Start.fromJSWithTag( + globalThis, + callframe.argument(0), + comptime @field(streams.Start, abi_name), + ) + else + .{ .empty = {} }, + ).toJS(globalThis); + } + + return this.sink.start( + if (callframe.argumentsCount() > 0) + try streams.Start.fromJS(globalThis, callframe.argument(0)) + else + .{ .empty = {} }, + ).toJS(globalThis); + } + + pub fn end(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + JSC.markBinding(@src()); + + const this = try getThis(globalThis, callframe); + + if (comptime @hasDecl(SinkType, "getPendingError")) { + if (this.sink.getPendingError()) |err| { + return globalThis.throwValue(err); + } + } + + defer { + if (comptime @hasField(SinkType, "done")) { + if (this.sink.done) { + callframe.this().unprotect(); + } + } + } + + return this.sink.endFromJS(globalThis).toJS(globalThis); + } + + pub fn endWithSink(ptr: *anyopaque, globalThis: *JSGlobalObject) callconv(JSC.conv) JSValue { + JSC.markBinding(@src()); + + var this = @as(*ThisSink, @ptrCast(@alignCast(ptr))); + + if (comptime @hasDecl(SinkType, "getPendingError")) { + if (this.sink.getPendingError()) |err| { + return globalThis.throwValue(err); + } + } + + return this.sink.endFromJS(globalThis).toJS(globalThis); + } + + pub fn updateRef(ptr: *anyopaque, value: bool) callconv(.C) void { + JSC.markBinding(@src()); + var this = bun.cast(*ThisSink, ptr); + if (comptime @hasDecl(SinkType, "updateRef")) + this.sink.updateRef(value); + } + + const jsWrite = JSC.toJSHostFn(@This().write); + const jsFlush = JSC.toJSHostFn(flush); + const jsStart = JSC.toJSHostFn(start); + const jsEnd = JSC.toJSHostFn(@This().end); + const jsConstruct = JSC.toJSHostFn(construct); + + fn jsGetInternalFd(ptr: *anyopaque) callconv(.C) JSValue { + var this = bun.cast(*ThisSink, ptr); + if (comptime @hasDecl(SinkType, "getFd")) { + return JSValue.jsNumber(this.sink.getFd()); + } + return .null; + } + + comptime { + if (bun.Environment.export_cpp_apis) { + @export(&finalize, .{ .name = abi_name ++ "__finalize" }); + @export(&jsWrite, .{ .name = abi_name ++ "__write" }); + @export(&jsGetInternalFd, .{ .name = abi_name 
++ "__getInternalFd" }); + @export(&close, .{ .name = abi_name ++ "__close" }); + @export(&jsFlush, .{ .name = abi_name ++ "__flush" }); + @export(&jsStart, .{ .name = abi_name ++ "__start" }); + @export(&jsEnd, .{ .name = abi_name ++ "__end" }); + @export(&jsConstruct, .{ .name = abi_name ++ "__construct" }); + @export(&endWithSink, .{ .name = abi_name ++ "__endWithSink" }); + @export(&updateRef, .{ .name = abi_name ++ "__updateRef" }); + @export(&memoryCost, .{ .name = abi_name ++ "__memoryCost" }); + } + } + }; +} + +const Detached = opaque {}; +const Subprocess = bun.api.Subprocess; +pub const DestructorPtr = bun.TaggedPointerUnion(.{ + Detached, + Subprocess, +}); + +pub export fn Bun__onSinkDestroyed( + ptr_value: ?*anyopaque, + sink_ptr: ?*anyopaque, +) callconv(.C) void { + _ = sink_ptr; // autofix + const ptr = DestructorPtr.from(ptr_value); + + if (ptr.isNull()) { + return; + } + + switch (ptr.tag()) { + @field(DestructorPtr.Tag, @typeName(Detached)) => { + return; + }, + @field(DestructorPtr.Tag, @typeName(Subprocess)) => { + const subprocess = ptr.as(Subprocess); + subprocess.onStdinDestroyed(); + }, + else => { + Output.debugWarn("Unknown sink type", .{}); + }, + } +} + +const std = @import("std"); +const bun = @import("bun"); +const Syscall = bun.sys; +const Output = bun.Output; +const JSC = bun.jsc; +const webcore = bun.webcore; +const streams = webcore.streams; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const Blob = webcore.Blob; diff --git a/src/bun.js/webcore/TextEncoder.zig b/src/bun.js/webcore/TextEncoder.zig index a8f316eb91..80b7510ff8 100644 --- a/src/bun.js/webcore/TextEncoder.zig +++ b/src/bun.js/webcore/TextEncoder.zig @@ -31,6 +31,7 @@ pub export fn TextEncoder__encode8( return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null); } } + pub export fn TextEncoder__encode16( globalThis: *JSGlobalObject, ptr: [*]const u16, @@ -71,7 +72,7 @@ pub export fn TextEncoder__encode16( @TypeOf(slice), slice, ) catch { - return JSC.toInvalidArguments("Out of memory", .{}, globalThis); + return globalThis.toInvalidArguments("Out of memory", .{}); }; return ArrayBuffer.fromBytes(bytes, .Uint8Array).toJSUnchecked(globalThis, null); } @@ -244,6 +245,14 @@ pub export fn TextEncoder__encodeInto8( return @bitCast(sized); } +comptime { + _ = &TextEncoder.TextEncoder__encode8; + _ = &TextEncoder.TextEncoder__encode16; + _ = &TextEncoder.TextEncoder__encodeInto8; + _ = &TextEncoder.TextEncoder__encodeInto16; + _ = &TextEncoder.TextEncoder__encodeRopeString; +} + const std = @import("std"); const bun = @import("bun"); const strings = bun.strings; diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig deleted file mode 100644 index f2f06b6db2..0000000000 --- a/src/bun.js/webcore/blob.zig +++ /dev/null @@ -1,6478 +0,0 @@ -const std = @import("std"); -const Api = @import("../../api/schema.zig").Api; -const bun = @import("bun"); -const MimeType = http.MimeType; -const ZigURL = @import("../../url.zig").URL; -const http = bun.http; -const JSC = bun.JSC; -const io = bun.io; -const Method = @import("../../http/method.zig").Method; -const FetchHeaders = JSC.FetchHeaders; -const ObjectPool = @import("../../pool.zig").ObjectPool; -const SystemError = JSC.SystemError; -const Output = bun.Output; -const MutableString = bun.MutableString; -const strings = bun.strings; -const string = bun.string; -const default_allocator = bun.default_allocator; -const FeatureFlags = bun.FeatureFlags; -const ArrayBuffer = 
@import("../base.zig").ArrayBuffer; -const Properties = @import("../base.zig").Properties; -const getAllocator = @import("../base.zig").getAllocator; -const JSError = bun.JSError; - -const Environment = @import("../../env.zig"); -const ZigString = JSC.ZigString; -const IdentityContext = @import("../../identity_context.zig").IdentityContext; -const JSPromise = JSC.JSPromise; -const JSValue = JSC.JSValue; -const JSGlobalObject = JSC.JSGlobalObject; -const NullableAllocator = bun.NullableAllocator; - -const VirtualMachine = JSC.VirtualMachine; -const Task = JSC.Task; -const JSPrinter = bun.js_printer; -const picohttp = bun.picohttp; -const StringJoiner = bun.StringJoiner; -const uws = bun.uws; - -const invalid_fd = bun.invalid_fd; -const Response = JSC.WebCore.Response; -const Body = JSC.WebCore.Body; -const Request = JSC.WebCore.Request; - -const libuv = bun.windows.libuv; - -const S3 = bun.S3; -const S3Credentials = S3.S3Credentials; -const PathOrBlob = JSC.Node.PathOrBlob; -const PathLike = JSC.Node.PathLike; -const WriteFilePromise = @import("blob/WriteFile.zig").WriteFilePromise; -const WriteFileWaitFromLockedValueTask = @import("blob/WriteFile.zig").WriteFileWaitFromLockedValueTask; -const NewReadFileHandler = @import("blob/ReadFile.zig").NewReadFileHandler; - -const S3File = @import("S3File.zig"); - -pub const Blob = struct { - const bloblog = Output.scoped(.Blob, false); - - pub const new = bun.TrivialNew(@This()); - pub const js = JSC.Codegen.JSBlob; - // NOTE: toJS is overridden - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - const rf = @import("blob/ReadFile.zig"); - pub const ReadFile = rf.ReadFile; - pub const ReadFileUV = rf.ReadFileUV; - pub const ReadFileTask = rf.ReadFileTask; - pub const ReadFileResultType = rf.ReadFileResultType; - - const wf = @import("blob/WriteFile.zig"); - pub const WriteFile = wf.WriteFile; - pub const WriteFileWindows = wf.WriteFileWindows; - pub const WriteFileTask = wf.WriteFileTask; - - pub const ClosingState = enum(u8) { - running, - closing, - }; - - reported_estimated_size: usize = 0, - - size: SizeType = 0, - offset: SizeType = 0, - /// When set, the blob will be freed on finalization callbacks - /// If the blob is contained in Response or Request, this must be null - allocator: ?std.mem.Allocator = null, - store: ?*Store = null, - content_type: string = "", - content_type_allocated: bool = false, - content_type_was_set: bool = false, - - /// JavaScriptCore strings are either latin1 or UTF-16 - /// When UTF-16, they're nearly always due to non-ascii characters - is_all_ascii: ?bool = null, - - /// Was it created via file constructor? - is_jsdom_file: bool = false, - - globalThis: *JSGlobalObject = undefined, - - last_modified: f64 = 0.0, - /// Blob name will lazy initialize when getName is called, but - /// we must be able to set the name, and we need to keep the value alive - /// https://github.com/oven-sh/bun/issues/10178 - name: bun.String = bun.String.dead, - - /// Max int of double precision - /// 9 petabytes is probably enough for awhile - /// We want to avoid coercing to a BigInt because that's a heap allocation - /// and it's generally just harder to use - pub const SizeType = u52; - pub const max_size = std.math.maxInt(SizeType); - - /// 1: Initial - /// 2: Added byte for whether it's a dom file, length and bytes for `stored_name`, - /// and f64 for `last_modified`. Removed reserved bytes, it's handled by version - /// number. 
- const serialization_version: u8 = 2; - - pub fn getFormDataEncoding(this: *Blob) ?*bun.FormData.AsyncFormData { - var content_type_slice: ZigString.Slice = this.getContentType() orelse return null; - defer content_type_slice.deinit(); - const encoding = bun.FormData.Encoding.get(content_type_slice.slice()) orelse return null; - return bun.FormData.AsyncFormData.init(this.allocator orelse bun.default_allocator, encoding) catch bun.outOfMemory(); - } - - pub fn hasContentTypeFromUser(this: *const Blob) bool { - return this.content_type_was_set or (this.store != null and (this.store.?.data == .file or this.store.?.data == .s3)); - } - - pub fn contentTypeOrMimeType(this: *const Blob) ?[]const u8 { - if (this.content_type.len > 0) { - return this.content_type; - } - if (this.store) |store| { - switch (store.data) { - .file => |file| { - return file.mime_type.value; - }, - .s3 => |s3| { - return s3.mime_type.value; - }, - else => return null, - } - } - return null; - } - - pub fn isBunFile(this: *const Blob) bool { - const store = this.store orelse return false; - - return store.data == .file; - } - - pub fn doReadFromS3(this: *Blob, comptime Function: anytype, global: *JSGlobalObject) JSValue { - bloblog("doReadFromS3", .{}); - - const WrappedFn = struct { - pub fn wrapped(b: *Blob, g: *JSGlobalObject, by: []u8) JSC.JSValue { - return JSC.toJSHostValue(g, Function(b, g, by, .clone)); - } - }; - return S3BlobDownloadTask.init(global, this, WrappedFn.wrapped); - } - pub fn doReadFile(this: *Blob, comptime Function: anytype, global: *JSGlobalObject) JSValue { - bloblog("doReadFile", .{}); - - const Handler = NewReadFileHandler(Function); - - var handler = bun.new(Handler, .{ - .context = this.*, - .globalThis = global, - }); - - if (Environment.isWindows) { - var promise = JSPromise.create(global); - const promise_value = promise.asValue(global); - promise_value.ensureStillAlive(); - handler.promise.strong.set(global, promise_value); - - ReadFileUV.start(handler.globalThis.bunVM().uvLoop(), this.store.?, this.offset, this.size, Handler, handler); - - return promise_value; - } - - const file_read = ReadFile.create( - bun.default_allocator, - this.store.?, - this.offset, - this.size, - *Handler, - handler, - Handler.run, - ) catch bun.outOfMemory(); - var read_file_task = ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch bun.outOfMemory(); - - // Create the Promise only after the store has been ref()'d. - // The garbage collector runs on memory allocations - // The JSPromise is the next GC'd memory allocation. - // This shouldn't really fix anything, but it's a little safer. 
- var promise = JSPromise.create(global); - const promise_value = promise.asValue(global); - promise_value.ensureStillAlive(); - handler.promise.strong.set(global, promise_value); - - read_file_task.schedule(); - - bloblog("doReadFile: read_file_task scheduled", .{}); - return promise_value; - } - - pub fn NewInternalReadFileHandler(comptime Context: type, comptime Function: anytype) type { - return struct { - pub fn run(handler: *anyopaque, bytes_: ReadFileResultType) void { - Function(bun.cast(Context, handler), bytes_); - } - }; - } - - pub fn doReadFileInternal(this: *Blob, comptime Handler: type, ctx: Handler, comptime Function: anytype, global: *JSGlobalObject) void { - if (Environment.isWindows) { - const ReadFileHandler = NewInternalReadFileHandler(Handler, Function); - return ReadFileUV.start(libuv.Loop.get(), this.store.?, this.offset, this.size, ReadFileHandler, ctx); - } - const file_read = ReadFile.createWithCtx( - bun.default_allocator, - this.store.?, - ctx, - NewInternalReadFileHandler(Handler, Function).run, - this.offset, - this.size, - ) catch bun.outOfMemory(); - var read_file_task = ReadFileTask.createOnJSThread(bun.default_allocator, global, file_read) catch bun.outOfMemory(); - read_file_task.schedule(); - } - - const FormDataContext = struct { - allocator: std.mem.Allocator, - joiner: StringJoiner, - boundary: []const u8, - failed: bool = false, - globalThis: *JSC.JSGlobalObject, - - pub fn onEntry(this: *FormDataContext, name: ZigString, entry: JSC.DOMFormData.FormDataEntry) void { - if (this.failed) return; - var globalThis = this.globalThis; - - const allocator = this.allocator; - const joiner = &this.joiner; - const boundary = this.boundary; - - joiner.pushStatic("--"); - joiner.pushStatic(boundary); // note: "static" here means "outlives the joiner" - joiner.pushStatic("\r\n"); - - joiner.pushStatic("Content-Disposition: form-data; name=\""); - const name_slice = name.toSlice(allocator); - joiner.push(name_slice.slice(), name_slice.allocator.get()); - - switch (entry) { - .string => |value| { - joiner.pushStatic("\"\r\n\r\n"); - const value_slice = value.toSlice(allocator); - joiner.push(value_slice.slice(), value_slice.allocator.get()); - }, - .file => |value| { - joiner.pushStatic("\"; filename=\""); - const filename_slice = value.filename.toSlice(allocator); - joiner.push(filename_slice.slice(), filename_slice.allocator.get()); - joiner.pushStatic("\"\r\n"); - - const blob = value.blob; - const content_type = if (blob.content_type.len > 0) blob.content_type else "application/octet-stream"; - joiner.pushStatic("Content-Type: "); - joiner.pushStatic(content_type); - joiner.pushStatic("\r\n\r\n"); - - if (blob.store) |store| { - if (blob.size == Blob.max_size) { - blob.resolveSize(); - } - switch (store.data) { - .s3 => |_| { - // TODO: s3 - // we need to make this async and use download/downloadSlice - }, - .file => |file| { - - // TODO: make this async + lazy - const res = JSC.Node.NodeFS.readFile( - globalThis.bunVM().nodeFS(), - .{ - .encoding = .buffer, - .path = file.pathlike, - .offset = blob.offset, - .max_size = blob.size, - }, - .sync, - ); - - switch (res) { - .err => |err| { - globalThis.throwValue(err.toJSC(globalThis)) catch {}; - this.failed = true; - }, - .result => |result| { - joiner.push(result.slice(), result.buffer.allocator); - }, - } - }, - .bytes => |_| { - joiner.pushStatic(blob.sharedView()); - }, - } - } - }, - } - - joiner.pushStatic("\r\n"); - } - }; - - pub fn getContentType( - this: *Blob, - ) ?ZigString.Slice { - if 
(this.content_type.len > 0) - return ZigString.Slice.fromUTF8NeverFree(this.content_type); - - return null; - } - - const StructuredCloneWriter = struct { - ctx: *anyopaque, - impl: *const fn (*anyopaque, ptr: [*]const u8, len: u32) callconv(JSC.conv) void, - - pub const WriteError = error{}; - pub fn write(this: StructuredCloneWriter, bytes: []const u8) WriteError!usize { - this.impl(this.ctx, bytes.ptr, @as(u32, @truncate(bytes.len))); - return bytes.len; - } - }; - - fn _onStructuredCloneSerialize( - this: *Blob, - comptime Writer: type, - writer: Writer, - ) !void { - try writer.writeInt(u8, serialization_version, .little); - - try writer.writeInt(u64, @intCast(this.offset), .little); - - try writer.writeInt(u32, @truncate(this.content_type.len), .little); - try writer.writeAll(this.content_type); - try writer.writeInt(u8, @intFromBool(this.content_type_was_set), .little); - - const store_tag: Store.SerializeTag = if (this.store) |store| - if (store.data == .file) .file else .bytes - else - .empty; - - try writer.writeInt(u8, @intFromEnum(store_tag), .little); - - this.resolveSize(); - if (this.store) |store| { - try store.serialize(Writer, writer); - } - - try writer.writeInt(u8, @intFromBool(this.is_jsdom_file), .little); - try writeFloat(f64, this.last_modified, Writer, writer); - } - - pub fn onStructuredCloneSerialize( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - ctx: *anyopaque, - writeBytes: *const fn (*anyopaque, ptr: [*]const u8, len: u32) callconv(JSC.conv) void, - ) void { - _ = globalThis; - - const Writer = std.io.Writer(StructuredCloneWriter, StructuredCloneWriter.WriteError, StructuredCloneWriter.write); - const writer = Writer{ - .context = .{ - .ctx = ctx, - .impl = writeBytes, - }, - }; - - try _onStructuredCloneSerialize(this, Writer, writer); - } - - pub fn onStructuredCloneTransfer( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - ctx: *anyopaque, - write: *const fn (*anyopaque, ptr: [*]const u8, len: usize) callconv(.C) void, - ) void { - _ = write; - _ = ctx; - _ = this; - _ = globalThis; - } - - fn writeFloat( - comptime FloatType: type, - value: FloatType, - comptime Writer: type, - writer: Writer, - ) !void { - const bytes: [@sizeOf(FloatType)]u8 = @bitCast(value); - try writer.writeAll(&bytes); - } - - fn readFloat( - comptime FloatType: type, - comptime Reader: type, - reader: Reader, - ) !FloatType { - const bytes = try reader.readBoundedBytes(@sizeOf(FloatType)); - return @bitCast(bytes.slice()[0..@sizeOf(FloatType)].*); - } - - fn readSlice( - reader: anytype, - len: usize, - allocator: std.mem.Allocator, - ) ![]u8 { - var slice = try allocator.alloc(u8, len); - slice = slice[0..try reader.read(slice)]; - if (slice.len != len) return error.TooSmall; - return slice; - } - - fn _onStructuredCloneDeserialize( - globalThis: *JSC.JSGlobalObject, - comptime Reader: type, - reader: Reader, - ) !JSValue { - const allocator = bun.default_allocator; - - const version = try reader.readInt(u8, .little); - - const offset = try reader.readInt(u64, .little); - - const content_type_len = try reader.readInt(u32, .little); - - const content_type = try readSlice(reader, content_type_len, allocator); - - const content_type_was_set: bool = try reader.readInt(u8, .little) != 0; - - const store_tag = try reader.readEnum(Store.SerializeTag, .little); - - const blob: *Blob = switch (store_tag) { - .bytes => bytes: { - const bytes_len = try reader.readInt(u32, .little); - const bytes = try readSlice(reader, bytes_len, allocator); - - const blob = Blob.init(bytes, 
allocator, globalThis); - - versions: { - if (version == 1) break :versions; - - const name_len = try reader.readInt(u32, .little); - const name = try readSlice(reader, name_len, allocator); - - if (blob.store) |store| switch (store.data) { - .bytes => |*bytes_store| bytes_store.stored_name = bun.PathString.init(name), - else => {}, - }; - - if (version == 2) break :versions; - } - - break :bytes Blob.new(blob); - }, - .file => file: { - const pathlike_tag = try reader.readEnum(JSC.Node.PathOrFileDescriptor.SerializeTag, .little); - - switch (pathlike_tag) { - .fd => { - const fd = try reader.readStruct(bun.FD); - - var path_or_fd = JSC.Node.PathOrFileDescriptor{ - .fd = fd, - }; - const blob = Blob.new(Blob.findOrCreateFileFromPath( - &path_or_fd, - globalThis, - true, - )); - - break :file blob; - }, - .path => { - const path_len = try reader.readInt(u32, .little); - - const path = try readSlice(reader, path_len, default_allocator); - var dest = JSC.Node.PathOrFileDescriptor{ - .path = .{ - .string = bun.PathString.init(path), - }, - }; - const blob = Blob.new(Blob.findOrCreateFileFromPath( - &dest, - globalThis, - true, - )); - - break :file blob; - }, - } - - return .zero; - }, - .empty => Blob.new(Blob.initEmpty(globalThis)), - }; - - versions: { - if (version == 1) break :versions; - - blob.is_jsdom_file = try reader.readInt(u8, .little) != 0; - blob.last_modified = try readFloat(f64, Reader, reader); - - if (version == 2) break :versions; - } - - blob.allocator = allocator; - blob.offset = @as(u52, @intCast(offset)); - if (content_type.len > 0) { - blob.content_type = content_type; - blob.content_type_allocated = true; - blob.content_type_was_set = content_type_was_set; - } - - return blob.toJS(globalThis); - } - - pub fn onStructuredCloneDeserialize(globalThis: *JSC.JSGlobalObject, ptr: [*]u8, end: [*]u8) bun.JSError!JSValue { - const total_length: usize = @intFromPtr(end) - @intFromPtr(ptr); - var buffer_stream = std.io.fixedBufferStream(ptr[0..total_length]); - const reader = buffer_stream.reader(); - - return _onStructuredCloneDeserialize(globalThis, @TypeOf(reader), reader) catch |err| switch (err) { - error.EndOfStream, error.TooSmall, error.InvalidValue => { - return globalThis.throw("Blob.onStructuredCloneDeserialize failed", .{}); - }, - error.OutOfMemory => { - return globalThis.throwOutOfMemory(); - }, - }; - } - - const URLSearchParamsConverter = struct { - allocator: std.mem.Allocator, - buf: []u8 = "", - globalThis: *JSC.JSGlobalObject, - pub fn convert(this: *URLSearchParamsConverter, str: ZigString) void { - var out = str.toSlice(this.allocator).cloneIfNeeded(this.allocator) catch bun.outOfMemory(); - this.buf = @constCast(out.slice()); - } - }; - - pub fn fromURLSearchParams( - globalThis: *JSC.JSGlobalObject, - allocator: std.mem.Allocator, - search_params: *JSC.URLSearchParams, - ) Blob { - var converter = URLSearchParamsConverter{ - .allocator = allocator, - .globalThis = globalThis, - }; - search_params.toString(URLSearchParamsConverter, &converter, URLSearchParamsConverter.convert); - var store = Blob.Store.init(converter.buf, allocator); - store.mime_type = MimeType.all.@"application/x-www-form-urlencoded"; - - var blob = Blob.initWithStore(store, globalThis); - blob.content_type = store.mime_type.value; - blob.content_type_was_set = true; - return blob; - } - - pub fn fromDOMFormData( - globalThis: *JSC.JSGlobalObject, - allocator: std.mem.Allocator, - form_data: *JSC.DOMFormData, - ) Blob { - var arena = bun.ArenaAllocator.init(allocator); - defer 
arena.deinit(); - var stack_allocator = std.heap.stackFallback(1024, arena.allocator()); - const stack_mem_all = stack_allocator.get(); - - var hex_buf: [70]u8 = undefined; - const boundary = brk: { - var random = globalThis.bunVM().rareData().nextUUID().bytes; - const formatter = std.fmt.fmtSliceHexLower(&random); - break :brk std.fmt.bufPrint(&hex_buf, "-WebkitFormBoundary{any}", .{formatter}) catch unreachable; - }; - - var context = FormDataContext{ - .allocator = allocator, - .joiner = .{ .allocator = stack_mem_all }, - .boundary = boundary, - .globalThis = globalThis, - }; - - form_data.forEach(FormDataContext, &context, FormDataContext.onEntry); - if (context.failed) { - return Blob.initEmpty(globalThis); - } - - context.joiner.pushStatic("--"); - context.joiner.pushStatic(boundary); - context.joiner.pushStatic("--\r\n"); - - const store = Blob.Store.init(context.joiner.done(allocator) catch bun.outOfMemory(), allocator); - var blob = Blob.initWithStore(store, globalThis); - blob.content_type = std.fmt.allocPrint(allocator, "multipart/form-data; boundary={s}", .{boundary}) catch bun.outOfMemory(); - blob.content_type_allocated = true; - blob.content_type_was_set = true; - - return blob; - } - - pub fn contentType(this: *const Blob) string { - return this.content_type; - } - - pub fn isDetached(this: *const Blob) bool { - return this.store == null; - } - - export fn Blob__dupeFromJS(value: JSC.JSValue) ?*Blob { - const this = Blob.fromJS(value) orelse return null; - return Blob__dupe(this); - } - - export fn Blob__setAsFile(this: *Blob, path_str: *bun.String) *Blob { - this.is_jsdom_file = true; - - // This is not 100% correct... - if (this.store) |store| { - if (store.data == .bytes) { - if (store.data.bytes.stored_name.len == 0) { - var utf8 = path_str.toUTF8WithoutRef(bun.default_allocator).clone(bun.default_allocator) catch unreachable; - store.data.bytes.stored_name = bun.PathString.init(utf8.slice()); - } - } - } - - return this; - } - - export fn Blob__dupe(ptr: *anyopaque) *Blob { - const this = bun.cast(*Blob, ptr); - const new_ptr = new(this.dupeWithContentType(true)); - new_ptr.allocator = bun.default_allocator; - return new_ptr; - } - - export fn Blob__destroy(this: *Blob) void { - this.finalize(); - } - - export fn Blob__getFileNameString(this: *Blob) callconv(.C) bun.String { - if (this.getFileName()) |filename| { - return bun.String.fromBytes(filename); - } - - return bun.String.empty; - } - - comptime { - _ = Blob__dupeFromJS; - _ = Blob__destroy; - _ = Blob__dupe; - _ = Blob__setAsFile; - _ = Blob__getFileNameString; - } - - pub fn writeFormatForSize(is_jdom_file: bool, size: usize, writer: anytype, comptime enable_ansi_colors: bool) !void { - if (is_jdom_file) { - try writer.writeAll(comptime Output.prettyFmt("File", enable_ansi_colors)); - } else { - try writer.writeAll(comptime Output.prettyFmt("Blob", enable_ansi_colors)); - } - try writer.print( - comptime Output.prettyFmt(" ({any})", enable_ansi_colors), - .{ - bun.fmt.size(size, .{}), - }, - ); - } - - pub fn writeFormat(this: *Blob, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { - const Writer = @TypeOf(writer); - - if (this.isDetached()) { - if (this.is_jsdom_file) { - try writer.writeAll(comptime Output.prettyFmt("[File detached]", enable_ansi_colors)); - } else { - try writer.writeAll(comptime Output.prettyFmt("[Blob detached]", enable_ansi_colors)); - } - return; - } - - { - const store = this.store.?; - switch (store.data) { - .s3 => |*s3| { - 
try S3File.writeFormat(s3, Formatter, formatter, writer, enable_ansi_colors, this.content_type, this.offset); - }, - .file => |file| { - try writer.writeAll(comptime Output.prettyFmt("FileRef", enable_ansi_colors)); - switch (file.pathlike) { - .path => |path| { - try writer.print( - comptime Output.prettyFmt(" (\"{s}\")", enable_ansi_colors), - .{ - path.slice(), - }, - ); - }, - .fd => |fd| { - if (Environment.isWindows) { - switch (fd.decodeWindows()) { - .uv => |uv_file| try writer.print( - comptime Output.prettyFmt(" (fd: {d})", enable_ansi_colors), - .{uv_file}, - ), - .windows => |handle| { - if (Environment.isDebug) { - @panic("this shouldn't be reachable."); - } - try writer.print( - comptime Output.prettyFmt(" (fd: 0x{x})", enable_ansi_colors), - .{@intFromPtr(handle)}, - ); - }, - } - } else { - try writer.print( - comptime Output.prettyFmt(" (fd: {d})", enable_ansi_colors), - .{fd.native()}, - ); - } - }, - } - }, - .bytes => { - try writeFormatForSize(this.is_jsdom_file, this.size, writer, enable_ansi_colors); - }, - } - } - - const show_name = (this.is_jsdom_file and this.getNameString() != null) or (!this.name.isEmpty() and this.store != null and this.store.?.data == .bytes); - if (!this.isS3() and (this.content_type.len > 0 or this.offset > 0 or show_name or this.last_modified != 0.0)) { - try writer.writeAll(" {\n"); - { - formatter.indent += 1; - defer formatter.indent -= 1; - - if (show_name) { - try formatter.writeIndent(Writer, writer); - - try writer.print( - comptime Output.prettyFmt("name: \"{}\"", enable_ansi_colors), - .{ - this.getNameString() orelse bun.String.empty, - }, - ); - - if (this.content_type.len > 0 or this.offset > 0 or this.last_modified != 0) { - try formatter.printComma(Writer, writer, enable_ansi_colors); - } - - try writer.writeAll("\n"); - } - - if (this.content_type.len > 0) { - try formatter.writeIndent(Writer, writer); - try writer.print( - comptime Output.prettyFmt("type: \"{s}\"", enable_ansi_colors), - .{ - this.content_type, - }, - ); - - if (this.offset > 0 or this.last_modified != 0) { - try formatter.printComma(Writer, writer, enable_ansi_colors); - } - - try writer.writeAll("\n"); - } - - if (this.offset > 0) { - try formatter.writeIndent(Writer, writer); - - try writer.print( - comptime Output.prettyFmt("offset: {d}\n", enable_ansi_colors), - .{ - this.offset, - }, - ); - - if (this.last_modified != 0) { - try formatter.printComma(Writer, writer, enable_ansi_colors); - } - - try writer.writeAll("\n"); - } - - if (this.last_modified != 0) { - try formatter.writeIndent(Writer, writer); - - try writer.print( - comptime Output.prettyFmt("lastModified: {d}\n", enable_ansi_colors), - .{ - this.last_modified, - }, - ); - } - } - - try formatter.writeIndent(Writer, writer); - try writer.writeAll("}"); - } - } - - const Retry = enum { @"continue", fail, no }; - - // we choose not to inline this so that the path buffer is not on the stack unless necessary. 
- noinline fn mkdirIfNotExists(this: anytype, err: bun.sys.Error, path_string: [:0]const u8, err_path: []const u8) Retry { - if (err.getErrno() == .NOENT and this.mkdirp_if_not_exists) { - if (std.fs.path.dirname(path_string)) |dirname| { - var node_fs: JSC.Node.NodeFS = .{}; - switch (node_fs.mkdirRecursive( - JSC.Node.Arguments.Mkdir{ - .path = .{ .string = bun.PathString.init(dirname) }, - .recursive = true, - .always_return_none = true, - }, - )) { - .result => { - this.mkdirp_if_not_exists = false; - return .@"continue"; - }, - .err => |err2| { - if (comptime @hasField(@TypeOf(this.*), "errno")) { - this.errno = bun.errnoToZigErr(err2.errno); - } - this.system_error = err.withPath(err_path).toSystemError(); - if (comptime @hasField(@TypeOf(this.*), "opened_fd")) { - this.opened_fd = invalid_fd; - } - return .fail; - }, - } - } - } - return .no; - } - - /// Write an empty string to a file by truncating it. - /// - /// This behavior matches what we do with the fast path. - /// - /// Returns an encoded `*JSPromise` that resolves if the file - /// - doesn't exist and is created - /// - exists and is truncated - fn writeFileWithEmptySourceToDestination( - ctx: *JSC.JSGlobalObject, - destination_blob: *Blob, - options: WriteFileOptions, - ) JSC.JSValue { - // SAFETY: null-checked by caller - const destination_store = destination_blob.store.?; - defer destination_blob.detach(); - - switch (destination_store.data) { - .file => |file| { - // TODO: make this async - const node_fs = ctx.bunVM().nodeFS(); - var result = node_fs.truncate(.{ - .path = file.pathlike, - .len = 0, - .flags = bun.O.CREAT, - }, .sync); - - if (result == .err) { - const errno = result.err.getErrno(); - var was_eperm = false; - err: switch (errno) { - // truncate might return EPERM when the parent directory doesn't exist - // #6336 - .PERM => { - was_eperm = true; - result.err.errno = @intCast(@intFromEnum(bun.C.E.NOENT)); - continue :err .NOENT; - }, - .NOENT => { - if (options.mkdirp_if_not_exists == false) break :err; - // NOTE: if .err is PERM, it ~should~ really is a - // permissions issue - const dirpath: []const u8 = switch (file.pathlike) { - .path => |path| std.fs.path.dirname(path.slice()) orelse break :err, - .fd => { - // NOTE: if this is an fd, it means the file - // exists, so we shouldn't try to mkdir it - // also means PERM is _actually_ a - // permissions issue - if (was_eperm) result.err.errno = @intCast(@intFromEnum(bun.C.E.PERM)); - break :err; - }, - }; - const mkdir_result = node_fs.mkdirRecursive(.{ - .path = .{ .string = bun.PathString.init(dirpath) }, - // TODO: Do we really want .mode to be 0o777? - .recursive = true, - .always_return_none = true, - }); - if (mkdir_result == .err) { - result.err = mkdir_result.err; - break :err; - } - - // SAFETY: we check if `file.pathlike` is an fd or - // not above, returning if it is. 
- var buf: bun.PathBuffer = undefined; - // TODO: respect `options.mode` - const mode: bun.Mode = JSC.Node.default_permission; - while (true) { - const open_res = bun.sys.open(file.pathlike.path.sliceZ(&buf), bun.O.CREAT | bun.O.TRUNC, mode); - switch (open_res) { - // errors fall through and are handled below - .err => |err| { - if (err.getErrno() == .INTR) continue; - result.err = open_res.err; - break :err; - }, - .result => |fd| { - fd.close(); - return JSC.JSPromise.resolvedPromiseValue(ctx, .jsNumber(0)); - }, - } - } - }, - else => {}, - } - - result.err = result.err.withPathLike(file.pathlike); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, result.toJS(ctx)); - } - }, - .s3 => |*s3| { - - // create empty file - var aws_options = s3.getCredentialsWithOptions(options.extra_options, ctx) catch |err| { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.takeException(err)); - }; - defer aws_options.deinit(); - - const Wrapper = struct { - promise: JSC.JSPromise.Strong, - store: *Store, - global: *JSC.JSGlobalObject, - - pub const new = bun.TrivialNew(@This()); - - pub fn resolve(result: S3.S3UploadResult, opaque_this: *anyopaque) void { - const this: *@This() = @ptrCast(@alignCast(opaque_this)); - switch (result) { - .success => this.promise.resolve(this.global, JSC.jsNumber(0)), - .failure => |err| this.promise.reject(this.global, err.toJS(this.global, this.store.getPath())), - } - this.deinit(); - } - - fn deinit(this: *@This()) void { - this.promise.deinit(); - this.store.deref(); - bun.destroy(this); - } - }; - - const promise = JSC.JSPromise.Strong.init(ctx); - const promise_value = promise.value(); - const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy_url = if (proxy) |p| p.href else null; - destination_store.ref(); - S3.upload( - &aws_options.credentials, - s3.path(), - "", - destination_blob.contentTypeOrMimeType(), - aws_options.acl, - proxy_url, - aws_options.storage_class, - Wrapper.resolve, - Wrapper.new(.{ - .promise = promise, - .store = destination_store, - .global = ctx, - }), - ); - return promise_value; - }, - // Writing to a buffer-backed blob should be a type error, - // making this unreachable. TODO: `{}` -> `unreachable` - .bytes => {}, - } - - return JSC.JSPromise.resolvedPromiseValue(ctx, JSC.JSValue.jsNumber(0)); - } - - pub fn writeFileWithSourceDestination( - ctx: *JSC.JSGlobalObject, - source_blob: *Blob, - destination_blob: *Blob, - options: WriteFileOptions, - ) JSC.JSValue { - const destination_store = destination_blob.store orelse Output.panic("Destination blob is detached", .{}); - const destination_type = std.meta.activeTag(destination_store.data); - - // TODO: make sure this invariant isn't being broken elsewhere (outside - // its usage from `Blob.writeFileInternal`), then upgrade this to - // Environment.allow_assert - if (Environment.isDebug) { - bun.assertf(destination_type != .bytes, "Cannot write to a Blob backed by a Buffer or TypedArray. This is a bug in the caller. 
Please report it to the Bun team.", .{}); - } - - const source_store = source_blob.store orelse return writeFileWithEmptySourceToDestination(ctx, destination_blob, options); - const source_type = std.meta.activeTag(source_store.data); - - if (destination_type == .file and source_type == .bytes) { - var write_file_promise = bun.new(WriteFilePromise, .{ - .globalThis = ctx, - }); - - if (comptime Environment.isWindows) { - var promise = JSPromise.create(ctx); - const promise_value = promise.asValue(ctx); - promise_value.ensureStillAlive(); - write_file_promise.promise.strong.set(ctx, promise_value); - _ = WriteFileWindows.create( - ctx.bunVM().eventLoop(), - destination_blob.*, - source_blob.*, - *WriteFilePromise, - write_file_promise, - &WriteFilePromise.run, - options.mkdirp_if_not_exists orelse true, - ); - return promise_value; - } - - const file_copier = WriteFile.create( - destination_blob.*, - source_blob.*, - *WriteFilePromise, - write_file_promise, - WriteFilePromise.run, - options.mkdirp_if_not_exists orelse true, - ) catch unreachable; - var task = WriteFileTask.createOnJSThread(bun.default_allocator, ctx, file_copier) catch bun.outOfMemory(); - // Defer promise creation until we're just about to schedule the task - var promise = JSC.JSPromise.create(ctx); - const promise_value = promise.asValue(ctx); - write_file_promise.promise.strong.set(ctx, promise_value); - promise_value.ensureStillAlive(); - task.schedule(); - return promise_value; - } - // If this is file <> file, we can just copy the file - else if (destination_type == .file and source_type == .file) { - if (comptime Environment.isWindows) { - return Store.CopyFileWindows.init( - destination_store, - source_store, - ctx.bunVM().eventLoop(), - options.mkdirp_if_not_exists orelse true, - destination_blob.size, - ); - } - var file_copier = Store.CopyFile.create( - bun.default_allocator, - destination_store, - source_store, - destination_blob.offset, - destination_blob.size, - ctx, - options.mkdirp_if_not_exists orelse true, - ) catch unreachable; - file_copier.schedule(); - return file_copier.promise.value(); - } else if (destination_type == .file and source_type == .s3) { - const s3 = &source_store.data.s3; - if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( - ctx, - source_blob, - @truncate(s3.options.partSize), - ), ctx)) |stream| { - return destination_blob.pipeReadableStreamToBlob(ctx, stream, options.extra_options); - } else { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.createErrorInstance("Failed to stream bytes from s3 bucket", .{})); - } - } else if (destination_type == .bytes and source_type == .bytes) { - // If this is bytes <> bytes, we can just duplicate it - // this is an edgecase - // it will happen if someone did Bun.write(new Blob([123]), new Blob([456])) - // eventually, this could be like Buffer.concat - var clone = source_blob.dupe(); - clone.allocator = bun.default_allocator; - const cloned = Blob.new(clone); - cloned.allocator = bun.default_allocator; - return JSPromise.resolvedPromiseValue(ctx, cloned.toJS(ctx)); - } else if (destination_type == .bytes and (source_type == .file or source_type == .s3)) { - const blob_value = source_blob.getSliceFrom(ctx, 0, 0, "", false); - - return JSPromise.resolvedPromiseValue( - ctx, - blob_value, - ); - } else if (destination_type == .s3) { - const s3 = &destination_store.data.s3; - var aws_options = s3.getCredentialsWithOptions(options.extra_options, ctx) catch |err| { - return 
JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.takeException(err)); - }; - defer aws_options.deinit(); - const proxy = ctx.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy_url = if (proxy) |p| p.href else null; - switch (source_store.data) { - .bytes => |bytes| { - if (bytes.len > S3.MultiPartUploadOptions.MAX_SINGLE_UPLOAD_SIZE) { - if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( - ctx, - source_blob, - @truncate(s3.options.partSize), - ), ctx)) |stream| { - return S3.uploadStream( - (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), - s3.path(), - stream, - ctx, - aws_options.options, - aws_options.acl, - aws_options.storage_class, - destination_blob.contentTypeOrMimeType(), - proxy_url, - null, - undefined, - ); - } else { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.createErrorInstance("Failed to stream bytes to s3 bucket", .{})); - } - } else { - const Wrapper = struct { - store: *Store, - promise: JSC.JSPromise.Strong, - global: *JSC.JSGlobalObject, - - pub const new = bun.TrivialNew(@This()); - - pub fn resolve(result: S3.S3UploadResult, opaque_self: *anyopaque) void { - const this: *@This() = @ptrCast(@alignCast(opaque_self)); - switch (result) { - .success => this.promise.resolve(this.global, JSC.jsNumber(this.store.data.bytes.len)), - .failure => |err| this.promise.reject(this.global, err.toJS(this.global, this.store.getPath())), - } - this.deinit(); - } - - fn deinit(this: *@This()) void { - this.promise.deinit(); - this.store.deref(); - } - }; - source_store.ref(); - const promise = JSC.JSPromise.Strong.init(ctx); - const promise_value = promise.value(); - - S3.upload( - &aws_options.credentials, - s3.path(), - bytes.slice(), - destination_blob.contentTypeOrMimeType(), - aws_options.acl, - proxy_url, - aws_options.storage_class, - Wrapper.resolve, - Wrapper.new(.{ - .store = source_store, - .promise = promise, - .global = ctx, - }), - ); - return promise_value; - } - }, - .file, .s3 => { - // stream - if (JSC.WebCore.ReadableStream.fromJS(JSC.WebCore.ReadableStream.fromBlob( - ctx, - source_blob, - @truncate(s3.options.partSize), - ), ctx)) |stream| { - return S3.uploadStream( - (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), - s3.path(), - stream, - ctx, - s3.options, - aws_options.acl, - aws_options.storage_class, - destination_blob.contentTypeOrMimeType(), - proxy_url, - null, - undefined, - ); - } else { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(ctx, ctx.createErrorInstance("Failed to stream bytes to s3 bucket", .{})); - } - }, - } - } - - unreachable; - } - - const WriteFileOptions = struct { - mkdirp_if_not_exists: ?bool = null, - extra_options: ?JSValue = null, - }; - - /// ## Errors - /// - If `path_or_blob` is a detached blob - /// ## Panics - /// - If `path_or_blob` is a `Blob` backed by a byte store - pub fn writeFileInternal(globalThis: *JSC.JSGlobalObject, path_or_blob_: *PathOrBlob, data: JSC.JSValue, options: WriteFileOptions) bun.JSError!JSC.JSValue { - if (data.isEmptyOrUndefinedOrNull()) { - return globalThis.throwInvalidArguments("Bun.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}); - } - var path_or_blob = path_or_blob_.*; - if (path_or_blob == .blob) { - const blob_store = path_or_blob.blob.store orelse { - return globalThis.throwInvalidArguments("Blob is detached", .{}); - }; - 
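`writeFileWithSourceDestination` is the dispatcher behind `Bun.write`: it picks a strategy from the (destination, source) store types. A rough sketch of the main combinations, with made-up paths; the S3 case assumes credentials are configured in the environment:

    // bytes -> file: written asynchronously via WriteFile / WriteFileWindows
    await Bun.write("out.txt", "some data");

    // file -> file: native file copy (CopyFile / CopyFileWindows), resolves
    // with the number of bytes copied
    await Bun.write(Bun.file("dest.bin"), Bun.file("src.bin"));

    // any -> s3: uploaded with a single PUT or a streaming multipart upload,
    // depending on the source size
    await Bun.write(Bun.file("s3://my-bucket/key.txt"), "hello");
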
bun.assertWithLocation(blob_store.data != .bytes, @src()); - // TODO only reset last_modified on success paths instead of - // resetting last_modified at the beginning for better performance. - if (blob_store.data == .file) { - // reset last_modified to force getLastModified() to reload after writing. - blob_store.data.file.last_modified = JSC.init_timestamp; - } - } - - const input_store: ?*Store = if (path_or_blob == .blob) path_or_blob.blob.store else null; - if (input_store) |st| st.ref(); - defer if (input_store) |st| st.deref(); - - var needs_async = false; - - if (options.mkdirp_if_not_exists) |mkdir| { - if (mkdir and - path_or_blob == .blob and - path_or_blob.blob.store != null and - path_or_blob.blob.store.?.data == .file and - path_or_blob.blob.store.?.data.file.pathlike == .fd) - { - return globalThis.throwInvalidArguments("Cannot create a directory for a file descriptor", .{}); - } - } - - // If you're doing Bun.write(), try to go fast by writing short input on the main thread. - // This is a heuristic, but it's a good one. - // - // except if you're on Windows. Windows I/O is slower. Let's not even try. - if (comptime !Environment.isWindows) { - if (path_or_blob == .path or - // If they try to set an offset, its a little more complicated so let's avoid that - (path_or_blob.blob.offset == 0 and !path_or_blob.blob.isS3() and - // Is this a file that is known to be a pipe? Let's avoid blocking the main thread on it. - !(path_or_blob.blob.store != null and - path_or_blob.blob.store.?.data == .file and - path_or_blob.blob.store.?.data.file.mode != 0 and - bun.isRegularFile(path_or_blob.blob.store.?.data.file.mode)))) - { - if (data.isString()) { - const len = data.getLength(globalThis); - - if (len < 256 * 1024) { - const str = try data.toBunString(globalThis); - defer str.deref(); - - const pathlike: JSC.Node.PathOrFileDescriptor = if (path_or_blob == .path) - path_or_blob.path - else - path_or_blob.blob.store.?.data.file.pathlike; - - if (pathlike == .path) { - const result = writeStringToFileFast( - globalThis, - pathlike, - str, - &needs_async, - true, - ); - if (!needs_async) { - return result; - } - } else { - const result = writeStringToFileFast( - globalThis, - pathlike, - str, - &needs_async, - false, - ); - if (!needs_async) { - return result; - } - } - } - } else if (data.asArrayBuffer(globalThis)) |buffer_view| { - if (buffer_view.byte_len < 256 * 1024) { - const pathlike: JSC.Node.PathOrFileDescriptor = if (path_or_blob == .path) - path_or_blob.path - else - path_or_blob.blob.store.?.data.file.pathlike; - - if (pathlike == .path) { - const result = writeBytesToFileFast( - globalThis, - pathlike, - buffer_view.byteSlice(), - &needs_async, - true, - ); - - if (!needs_async) { - return result; - } - } else { - const result = writeBytesToFileFast( - globalThis, - pathlike, - buffer_view.byteSlice(), - &needs_async, - false, - ); - - if (!needs_async) { - return result; - } - } - } - } - } - } - - // if path_or_blob is a path, convert it into a file blob - var destination_blob: Blob = if (path_or_blob == .path) brk: { - const new_blob = Blob.findOrCreateFileFromPath(&path_or_blob_.path, globalThis, true); - if (new_blob.store == null) { - return globalThis.throwInvalidArguments("Writing to an empty blob is not implemented yet", .{}); - } - break :brk new_blob; - } else path_or_blob.blob.dupe(); - - if (bun.Environment.allow_assert and path_or_blob == .blob) { - // sanity check. Should never happen because - // 1. 
destination blobs passed via path_or_blob are null checked at the very start - // 2. newly created blobs from paths get null checked immediately after creation. - bun.unsafeAssert(path_or_blob.blob.store != null); - } - - // TODO: implement a writeev() fast path - var source_blob: Blob = brk: { - if (data.as(Response)) |response| { - switch (response.body.value) { - .WTFStringImpl, - .InternalBlob, - .Used, - .Empty, - .Blob, - .Null, - => { - break :brk response.body.use(); - }, - .Error => |*err_ref| { - destination_blob.detach(); - _ = response.body.value.use(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err_ref.toJS(globalThis)); - }, - .Locked => |*locked| { - if (destination_blob.isS3()) { - const s3 = &destination_blob.store.?.data.s3; - var aws_options = try s3.getCredentialsWithOptions(options.extra_options, globalThis); - defer aws_options.deinit(); - _ = response.body.value.toReadableStream(globalThis); - if (locked.readable.get(globalThis)) |readable| { - if (readable.isDisturbed(globalThis)) { - destination_blob.detach(); - return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); - } - const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy_url = if (proxy) |p| p.href else null; - - return S3.uploadStream( - (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), - s3.path(), - readable, - globalThis, - aws_options.options, - aws_options.acl, - aws_options.storage_class, - destination_blob.contentTypeOrMimeType(), - proxy_url, - null, - undefined, - ); - } - destination_blob.detach(); - return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); - } - var task = bun.new(WriteFileWaitFromLockedValueTask, .{ - .globalThis = globalThis, - .file_blob = destination_blob, - .promise = JSC.JSPromise.Strong.init(globalThis), - .mkdirp_if_not_exists = options.mkdirp_if_not_exists orelse true, - }); - - response.body.value.Locked.task = task; - response.body.value.Locked.onReceiveValue = WriteFileWaitFromLockedValueTask.thenWrap; - return task.promise.value(); - }, - } - } - - if (data.as(Request)) |request| { - switch (request.body.value) { - .WTFStringImpl, - .InternalBlob, - .Used, - .Empty, - .Blob, - .Null, - => { - break :brk request.body.value.use(); - }, - .Error => |*err_ref| { - destination_blob.detach(); - _ = request.body.value.use(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err_ref.toJS(globalThis)); - }, - .Locked => |locked| { - if (destination_blob.isS3()) { - const s3 = &destination_blob.store.?.data.s3; - var aws_options = try s3.getCredentialsWithOptions(options.extra_options, globalThis); - defer aws_options.deinit(); - _ = request.body.value.toReadableStream(globalThis); - if (locked.readable.get(globalThis)) |readable| { - if (readable.isDisturbed(globalThis)) { - destination_blob.detach(); - return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); - } - const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy_url = if (proxy) |p| p.href else null; - return S3.uploadStream( - (if (options.extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), - s3.path(), - readable, - globalThis, - aws_options.options, - aws_options.acl, - aws_options.storage_class, - destination_blob.contentTypeOrMimeType(), - proxy_url, - null, - undefined, - ); - } - 
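When the input is a `Response` (or `Request`) whose body is still a live stream and the destination is S3, the branch above pipes the body straight into a multipart upload instead of buffering it. A hedged usage sketch; the URL and bucket are placeholders, and S3 credentials are assumed to come from the environment:

    const res = await fetch("https://example.com/big-file.bin");
    // Streams the response body into the S3 object without buffering it all in memory.
    await Bun.write(Bun.file("s3://my-bucket/big-file.bin"), res);
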
destination_blob.detach(); - return globalThis.throwInvalidArguments("ReadableStream has already been used", .{}); - } - var task = bun.new(WriteFileWaitFromLockedValueTask, .{ - .globalThis = globalThis, - .file_blob = destination_blob, - .promise = JSC.JSPromise.Strong.init(globalThis), - .mkdirp_if_not_exists = options.mkdirp_if_not_exists orelse true, - }); - - request.body.value.Locked.task = task; - request.body.value.Locked.onReceiveValue = WriteFileWaitFromLockedValueTask.thenWrap; - - return task.promise.value(); - }, - } - } - - break :brk Blob.get( - globalThis, - data, - false, - false, - ) catch |err| { - if (err == error.InvalidArguments) { - return globalThis.throwInvalidArguments("Expected an Array", .{}); - } - return globalThis.throwOutOfMemory(); - }; - }; - defer source_blob.detach(); - - const destination_store = destination_blob.store; - if (destination_store) |store| { - store.ref(); - } - - defer { - if (destination_store) |store| { - store.deref(); - } - } - - return writeFileWithSourceDestination(globalThis, &source_blob, &destination_blob, options); - } - - /// `Bun.write(destination, input, options?)` - pub fn writeFile(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const arguments = callframe.arguments(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - - // accept a path or a blob - var path_or_blob = try PathOrBlob.fromJSNoCopy(globalThis, &args); - defer { - if (path_or_blob == .path) { - path_or_blob.path.deinit(); - } - } - // "Blob" must actually be a BunFile, not a webcore blob. - if (path_or_blob == .blob) { - const store = path_or_blob.blob.store orelse { - return globalThis.throw("Cannot write to a detached Blob", .{}); - }; - if (store.data == .bytes) { - return globalThis.throwInvalidArguments("Cannot write to a Blob backed by bytes, which are always read-only", .{}); - } - } - - const data = args.nextEat() orelse { - return globalThis.throwInvalidArguments("Bun.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}); - }; - var mkdirp_if_not_exists: ?bool = null; - const options = args.nextEat(); - if (options) |options_object| { - if (options_object.isObject()) { - if (try options_object.getTruthy(globalThis, "createPath")) |create_directory| { - if (!create_directory.isBoolean()) { - return globalThis.throwInvalidArgumentType("write", "options.createPath", "boolean"); - } - mkdirp_if_not_exists = create_directory.toBoolean(); - } - } else if (!options_object.isEmptyOrUndefinedOrNull()) { - return globalThis.throwInvalidArgumentType("write", "options", "object"); - } - } - return writeFileInternal(globalThis, &path_or_blob, data, .{ - .mkdirp_if_not_exists = mkdirp_if_not_exists, - .extra_options = options, - }); - } - - const write_permissions = 0o664; - - fn writeStringToFileFast( - globalThis: *JSC.JSGlobalObject, - pathlike: JSC.Node.PathOrFileDescriptor, - str: bun.String, - needs_async: *bool, - comptime needs_open: bool, - ) JSC.JSValue { - const fd: bun.FileDescriptor = if (comptime !needs_open) pathlike.fd else brk: { - var file_path: bun.PathBuffer = undefined; - switch (bun.sys.open( - pathlike.path.sliceZ(&file_path), - // we deliberately don't use O_TRUNC here - // it's a perf optimization - bun.O.WRONLY | bun.O.CREAT | bun.O.NONBLOCK, - write_permissions, - )) { - .result => |result| { - break :brk result; - }, - .err => |err| { - if (err.getErrno() == .NOENT) { - needs_async.* = true; - return .zero; - } - - return 
JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( - globalThis, - err.withPath(pathlike.path.slice()).toJSC(globalThis), - ); - }, - } - unreachable; - }; - - var truncate = needs_open or str.isEmpty(); - const jsc_vm = globalThis.bunVM(); - var written: usize = 0; - - defer { - // we only truncate if it's a path - // if it's a file descriptor, we assume they want manual control over that behavior - if (truncate) { - _ = fd.truncate(@intCast(written)); - } - if (needs_open) { - fd.close(); - } - } - if (!str.isEmpty()) { - var decoded = str.toUTF8(jsc_vm.allocator); - defer decoded.deinit(); - - var remain = decoded.slice(); - while (remain.len > 0) { - const result = bun.sys.write(fd, remain); - switch (result) { - .result => |res| { - written += res; - remain = remain[res..]; - if (res == 0) break; - }, - .err => |err| { - truncate = false; - if (err.getErrno() == .AGAIN) { - needs_async.* = true; - return .zero; - } - if (comptime !needs_open) { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); - } - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( - globalThis, - err.withPath(pathlike.path.slice()).toJSC(globalThis), - ); - }, - } - } - } - - return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(written)); - } - - fn writeBytesToFileFast( - globalThis: *JSC.JSGlobalObject, - pathlike: JSC.Node.PathOrFileDescriptor, - bytes: []const u8, - needs_async: *bool, - comptime needs_open: bool, - ) JSC.JSValue { - const fd: bun.FileDescriptor = if (comptime !needs_open) pathlike.fd else brk: { - var file_path: bun.PathBuffer = undefined; - switch (bun.sys.open( - pathlike.path.sliceZ(&file_path), - if (!Environment.isWindows) - // we deliberately don't use O_TRUNC here - // it's a perf optimization - bun.O.WRONLY | bun.O.CREAT | bun.O.NONBLOCK - else - bun.O.WRONLY | bun.O.CREAT, - write_permissions, - )) { - .result => |result| { - break :brk result; - }, - .err => |err| { - if (!Environment.isWindows) { - if (err.getErrno() == .NOENT) { - needs_async.* = true; - return .zero; - } - } - - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( - globalThis, - err.withPath(pathlike.path.slice()).toJSC(globalThis), - ); - }, - } - }; - - // TODO: on windows this is always synchronous - - const truncate = needs_open or bytes.len == 0; - var written: usize = 0; - defer if (needs_open) fd.close(); - - var remain = bytes; - const end = remain.ptr + remain.len; - - while (remain.ptr != end) { - const result = bun.sys.write(fd, remain); - switch (result) { - .result => |res| { - written += res; - remain = remain[res..]; - if (res == 0) break; - }, - .err => |err| { - if (!Environment.isWindows) { - if (err.getErrno() == .AGAIN) { - needs_async.* = true; - return .zero; - } - } - if (comptime !needs_open) { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( - globalThis, - err.toJSC(globalThis), - ); - } - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( - globalThis, - err.withPath(pathlike.path.slice()).toJSC(globalThis), - ); - }, - } - } - - if (truncate) { - if (Environment.isWindows) { - _ = std.os.windows.kernel32.SetEndOfFile(fd.cast()); - } else { - _ = bun.sys.ftruncate(fd, @as(i64, @intCast(written))); - } - } - - return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(written)); - } - export fn JSDOMFile__construct(globalThis: *JSC.JSGlobalObject, callframe: 
*JSC.CallFrame) callconv(JSC.conv) ?*Blob { - return JSDOMFile__construct_(globalThis, callframe) catch |err| switch (err) { - error.JSError => null, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - return null; - }, - }; - } - pub fn JSDOMFile__construct_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*Blob { - JSC.markBinding(@src()); - const allocator = bun.default_allocator; - var blob: Blob = undefined; - var arguments = callframe.arguments_old(3); - const args = arguments.slice(); - - if (args.len < 2) { - return globalThis.throwInvalidArguments("new File(bits, name) expects at least 2 arguments", .{}); - } - { - const name_value_str = try bun.String.fromJS(args[1], globalThis); - defer name_value_str.deref(); - - blob = get(globalThis, args[0], false, true) catch |err| switch (err) { - error.JSError, error.OutOfMemory => |e| return e, - error.InvalidArguments => { - return globalThis.throwInvalidArguments("new Blob() expects an Array", .{}); - }, - }; - if (blob.store) |store_| { - switch (store_.data) { - .bytes => |*bytes| { - bytes.stored_name = bun.PathString.init( - (name_value_str.toUTF8WithoutRef(bun.default_allocator).clone(bun.default_allocator) catch bun.outOfMemory()).slice(), - ); - }, - .s3, .file => { - blob.name = name_value_str.dupeRef(); - }, - } - } else if (!name_value_str.isEmpty()) { - // not store but we have a name so we need a store - blob.store = Blob.Store.new(.{ - .data = .{ - .bytes = Blob.ByteStore.initEmptyWithName( - bun.PathString.init( - (name_value_str.toUTF8WithoutRef(bun.default_allocator).clone(bun.default_allocator) catch bun.outOfMemory()).slice(), - ), - allocator, - ), - }, - .allocator = allocator, - .ref_count = .init(1), - }); - } - } - - var set_last_modified = false; - - if (args.len > 2) { - const options = args[2]; - if (options.isObject()) { - // type, the ASCII-encoded string in lower case - // representing the media type of the Blob. - // Normative conditions for this member are provided - // in the § 3.1 Constructors. - if (try options.get(globalThis, "type")) |content_type| { - inner: { - if (content_type.isString()) { - var content_type_str = try content_type.toSlice(globalThis, bun.default_allocator); - defer content_type_str.deinit(); - const slice = content_type_str.slice(); - if (!strings.isAllASCII(slice)) { - break :inner; - } - blob.content_type_was_set = true; - - if (globalThis.bunVM().mimeType(slice)) |mime| { - blob.content_type = mime.value; - break :inner; - } - const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); - blob.content_type = strings.copyLowercase(slice, content_type_buf); - blob.content_type_allocated = true; - } - } - } - - if (try options.getTruthy(globalThis, "lastModified")) |last_modified| { - set_last_modified = true; - blob.last_modified = last_modified.coerce(f64, globalThis); - } - } - } - - if (!set_last_modified) { - // `lastModified` should be the current date in milliseconds if unspecified. - // https://developer.mozilla.org/en-US/docs/Web/API/File/lastModified - blob.last_modified = @floatFromInt(std.time.milliTimestamp()); - } - - if (blob.content_type.len == 0) { - blob.content_type = ""; - blob.content_type_was_set = false; - } - - var blob_ = Blob.new(blob); - blob_.allocator = allocator; - blob_.is_jsdom_file = true; - return blob_; - } - - fn calculateEstimatedByteSize(this: *Blob) void { - // in-memory size. not the size on disk. 
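The constructor above implements the global `File` class. A small usage sketch of the behavior it encodes: the MIME type is lowercased (and ignored if it contains non-ASCII characters), and `lastModified` defaults to the construction time when omitted:

    const file = new File(["hello"], "greeting.txt", { type: "TEXT/PLAIN" });
    console.log(file.name); // "greeting.txt"
    console.log(file.type); // "text/plain" (lowercased)
    console.log(file.lastModified); // ~Date.now() at construction when not supplied
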
- var size: usize = @sizeOf(Blob); - - if (this.store) |store| { - size += @sizeOf(Blob.Store); - switch (store.data) { - .bytes => { - size += store.data.bytes.stored_name.estimatedSize(); - size += if (this.size != Blob.max_size) - this.size - else - store.data.bytes.len; - }, - .file => size += store.data.file.pathlike.estimatedSize(), - .s3 => size += store.data.s3.estimatedSize(), - } - } - - this.reported_estimated_size = size + (this.content_type.len * @intFromBool(this.content_type_allocated)) + this.name.byteSlice().len; - } - - pub fn estimatedSize(this: *Blob) usize { - return this.reported_estimated_size; - } - - comptime { - _ = JSDOMFile__hasInstance; - } - - pub fn constructBunFile( - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - var vm = globalObject.bunVM(); - const arguments = callframe.arguments_old(2).slice(); - var args = JSC.Node.ArgumentsSlice.init(vm, arguments); - defer args.deinit(); - - var path = (try JSC.Node.PathOrFileDescriptor.fromJS(globalObject, &args, bun.default_allocator)) orelse { - return globalObject.throwInvalidArguments("Expected file path string or file descriptor", .{}); - }; - const options = if (arguments.len >= 2) arguments[1] else null; - - if (path == .path) { - if (strings.hasPrefixComptime(path.path.slice(), "s3://")) { - return try S3File.constructInternalJS(globalObject, path.path, options); - } - } - defer path.deinitAndUnprotect(); - - var blob = Blob.findOrCreateFileFromPath(&path, globalObject, false); - - if (options) |opts| { - if (opts.isObject()) { - if (try opts.getTruthy(globalObject, "type")) |file_type| { - inner: { - if (file_type.isString()) { - var allocator = bun.default_allocator; - var str = try file_type.toSlice(globalObject, bun.default_allocator); - defer str.deinit(); - const slice = str.slice(); - if (!strings.isAllASCII(slice)) { - break :inner; - } - blob.content_type_was_set = true; - if (vm.mimeType(str.slice())) |entry| { - blob.content_type = entry.value; - break :inner; - } - const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); - blob.content_type = strings.copyLowercase(slice, content_type_buf); - blob.content_type_allocated = true; - } - } - } - if (try opts.getTruthy(globalObject, "lastModified")) |last_modified| { - blob.last_modified = last_modified.coerce(f64, globalObject); - } - } - } - - var ptr = Blob.new(blob); - ptr.allocator = bun.default_allocator; - return ptr.toJS(globalObject); - } - - pub fn findOrCreateFileFromPath(path_or_fd: *JSC.Node.PathOrFileDescriptor, globalThis: *JSGlobalObject, comptime check_s3: bool) Blob { - var vm = globalThis.bunVM(); - const allocator = bun.default_allocator; - if (check_s3) { - if (path_or_fd.* == .path) { - if (strings.startsWith(path_or_fd.path.slice(), "s3://")) { - const credentials = globalThis.bunVM().transpiler.env.getS3Credentials(); - const copy = path_or_fd.*; - path_or_fd.* = .{ .path = .{ .string = bun.PathString.empty } }; - return Blob.initWithStore(Blob.Store.initS3(copy.path, null, credentials, allocator) catch bun.outOfMemory(), globalThis); - } - } - } - const path: JSC.Node.PathOrFileDescriptor = brk: { - switch (path_or_fd.*) { - .path => { - var slice = path_or_fd.path.slice(); - - if (Environment.isWindows and bun.strings.eqlComptime(slice, "/dev/null")) { - path_or_fd.deinit(); - path_or_fd.* = .{ - .path = .{ - // this memory is freed with this allocator in `Blob.Store.deinit` - .string = bun.PathString.init(allocator.dupe(u8, "\\\\.\\NUL") catch 
bun.outOfMemory()), - }, - }; - slice = path_or_fd.path.slice(); - } - - if (vm.standalone_module_graph) |graph| { - if (graph.find(slice)) |file| { - defer { - if (path_or_fd.path != .string) { - path_or_fd.deinit(); - path_or_fd.* = .{ .path = .{ .string = bun.PathString.empty } }; - } - } - - return file.blob(globalThis).dupe(); - } - } - - path_or_fd.toThreadSafe(); - const copy = path_or_fd.*; - path_or_fd.* = .{ .path = .{ .string = bun.PathString.empty } }; - break :brk copy; - }, - .fd => { - if (path_or_fd.fd.stdioTag()) |tag| { - const store = switch (tag) { - .std_in => vm.rareData().stdin(), - .std_err => vm.rareData().stderr(), - .std_out => vm.rareData().stdout(), - }; - store.ref(); - return Blob.initWithStore(store, globalThis); - } - break :brk path_or_fd.*; - }, - } - }; - - return Blob.initWithStore(Blob.Store.initFile(path, null, allocator) catch bun.outOfMemory(), globalThis); - } - - pub const Store = struct { - data: Data, - - mime_type: MimeType = MimeType.none, - ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1), - is_all_ascii: ?bool = null, - allocator: std.mem.Allocator, - - pub const new = bun.TrivialNew(@This()); - - pub fn memoryCost(this: *const Store) usize { - return if (this.hasOneRef()) @sizeOf(@This()) + switch (this.data) { - .bytes => this.data.bytes.len, - .file => 0, - .s3 => |s3| s3.estimatedSize(), - } else 0; - } - - pub fn getPath(this: *const Store) ?[]const u8 { - return switch (this.data) { - .bytes => |*bytes| if (bytes.stored_name.len > 0) bytes.stored_name.slice() else null, - .file => |*file| if (file.pathlike == .path) file.pathlike.path.slice() else null, - .s3 => |*s3| s3.pathlike.slice(), - }; - } - - pub fn size(this: *const Store) SizeType { - return switch (this.data) { - .bytes => this.data.bytes.len, - .s3, .file => Blob.max_size, - }; - } - - pub const Map = std.HashMap(u64, *JSC.WebCore.Blob.Store, IdentityContext(u64), 80); - - pub const Data = union(enum) { - bytes: ByteStore, - file: FileStore, - s3: S3Store, - }; - - pub fn ref(this: *Store) void { - const old = this.ref_count.fetchAdd(1, .monotonic); - assert(old > 0); - } - - pub fn hasOneRef(this: *const Store) bool { - return this.ref_count.load(.monotonic) == 1; - } - - /// Caller is responsible for derefing the Store. 
- pub fn toAnyBlob(this: *Store) ?AnyBlob { - if (this.hasOneRef()) { - if (this.data == .bytes) { - return .{ .InternalBlob = this.data.bytes.toInternalBlob() }; - } - } - - return null; - } - - pub fn external(ptr: ?*anyopaque, _: ?*anyopaque, _: usize) callconv(.C) void { - if (ptr == null) return; - var this = bun.cast(*Store, ptr); - this.deref(); - } - pub fn initS3WithReferencedCredentials(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: *S3Credentials, allocator: std.mem.Allocator) !*Store { - var path = pathlike; - // this actually protects/refs the pathlike - path.toThreadSafe(); - - const store = Blob.Store.new(.{ - .data = .{ - .s3 = S3Store.initWithReferencedCredentials( - path, - mime_type orelse brk: { - const sliced = path.slice(); - if (sliced.len > 0) { - var extname = std.fs.path.extension(sliced); - extname = std.mem.trim(u8, extname, "."); - if (http.MimeType.byExtensionNoDefault(extname)) |mime| { - break :brk mime; - } - } - break :brk null; - }, - credentials, - ), - }, - .allocator = allocator, - .ref_count = std.atomic.Value(u32).init(1), - }); - return store; - } - pub fn initS3(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: S3Credentials, allocator: std.mem.Allocator) !*Store { - var path = pathlike; - // this actually protects/refs the pathlike - path.toThreadSafe(); - - const store = Blob.Store.new(.{ - .data = .{ - .s3 = S3Store.init( - path, - mime_type orelse brk: { - const sliced = path.slice(); - if (sliced.len > 0) { - var extname = std.fs.path.extension(sliced); - extname = std.mem.trim(u8, extname, "."); - if (http.MimeType.byExtensionNoDefault(extname)) |mime| { - break :brk mime; - } - } - break :brk null; - }, - credentials, - ), - }, - .allocator = allocator, - .ref_count = std.atomic.Value(u32).init(1), - }); - return store; - } - pub fn initFile(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?http.MimeType, allocator: std.mem.Allocator) !*Store { - const store = Blob.Store.new(.{ - .data = .{ - .file = FileStore.init( - pathlike, - mime_type orelse brk: { - if (pathlike == .path) { - const sliced = pathlike.path.slice(); - if (sliced.len > 0) { - var extname = std.fs.path.extension(sliced); - extname = std.mem.trim(u8, extname, "."); - if (http.MimeType.byExtensionNoDefault(extname)) |mime| { - break :brk mime; - } - } - } - - break :brk null; - }, - ), - }, - .allocator = allocator, - .ref_count = std.atomic.Value(u32).init(1), - }); - return store; - } - - /// Takes ownership of `bytes`, which must have been allocated with `allocator`. 
- pub fn init(bytes: []u8, allocator: std.mem.Allocator) *Store { - const store = Blob.Store.new(.{ - .data = .{ - .bytes = ByteStore.init(bytes, allocator), - }, - .allocator = allocator, - .ref_count = .init(1), - }); - return store; - } - - pub fn sharedView(this: Store) []u8 { - if (this.data == .bytes) - return this.data.bytes.slice(); - - return &[_]u8{}; - } - - pub fn deref(this: *Blob.Store) void { - const old = this.ref_count.fetchSub(1, .monotonic); - assert(old >= 1); - if (old == 1) { - this.deinit(); - } - } - - pub fn deinit(this: *Blob.Store) void { - const allocator = this.allocator; - - switch (this.data) { - .bytes => |*bytes| { - bytes.deinit(); - }, - .file => |file| { - if (file.pathlike == .path) { - if (file.pathlike.path == .string) { - allocator.free(@constCast(file.pathlike.path.slice())); - } else { - file.pathlike.path.deinit(); - } - } - }, - .s3 => |*s3| { - s3.deinit(allocator); - }, - } - - bun.destroy(this); - } - - const SerializeTag = enum(u8) { - file = 0, - bytes = 1, - empty = 2, - }; - - pub fn serialize(this: *Store, comptime Writer: type, writer: Writer) !void { - switch (this.data) { - .file => |file| { - const pathlike_tag: JSC.Node.PathOrFileDescriptor.SerializeTag = if (file.pathlike == .fd) .fd else .path; - try writer.writeInt(u8, @intFromEnum(pathlike_tag), .little); - - switch (file.pathlike) { - .fd => |fd| { - try writer.writeStruct(fd); - }, - .path => |path| { - const path_slice = path.slice(); - try writer.writeInt(u32, @as(u32, @truncate(path_slice.len)), .little); - try writer.writeAll(path_slice); - }, - } - }, - .s3 => |s3| { - const pathlike_tag: JSC.Node.PathOrFileDescriptor.SerializeTag = .path; - try writer.writeInt(u8, @intFromEnum(pathlike_tag), .little); - - const path_slice = s3.pathlike.slice(); - try writer.writeInt(u32, @as(u32, @truncate(path_slice.len)), .little); - try writer.writeAll(path_slice); - }, - .bytes => |bytes| { - const slice = bytes.slice(); - try writer.writeInt(u32, @truncate(slice.len), .little); - try writer.writeAll(slice); - - try writer.writeInt(u32, @truncate(bytes.stored_name.slice().len), .little); - try writer.writeAll(bytes.stored_name.slice()); - }, - } - } - - pub fn fromArrayList(list: std.ArrayListUnmanaged(u8), allocator: std.mem.Allocator) !*Blob.Store { - return try Blob.Store.init(list.items, allocator); - } - - pub fn FileOpener(comptime This: type) type { - return struct { - context: *This, - - const State = @This(); - - const __opener_flags = bun.O.NONBLOCK | bun.O.CLOEXEC; - - const open_flags_ = if (@hasDecl(This, "open_flags")) - This.open_flags | __opener_flags - else - bun.O.RDONLY | __opener_flags; - - fn getFdByOpening(this: *This, comptime Callback: OpenCallback) void { - var buf: bun.PathBuffer = undefined; - var path_string = if (@hasField(This, "file_store")) - this.file_store.pathlike.path - else - this.file_blob.store.?.data.file.pathlike.path; - - const path = path_string.sliceZ(&buf); - - if (Environment.isWindows) { - const WrappedCallback = struct { - pub fn callback(req: *libuv.fs_t) callconv(.C) void { - var self: *This = @alignCast(@ptrCast(req.data.?)); - { - defer req.deinit(); - if (req.result.errEnum()) |errEnum| { - var path_string_2 = if (@hasField(This, "file_store")) - self.file_store.pathlike.path - else - self.file_blob.store.?.data.file.pathlike.path; - self.errno = bun.errnoToZigErr(errEnum); - self.system_error = bun.sys.Error.fromCode(errEnum, .open) - .withPath(path_string_2.slice()) - .toSystemError(); - self.opened_fd = invalid_fd; - } else { - 
self.opened_fd = req.result.toFD(); - } - } - Callback(self, self.opened_fd); - } - }; - - const rc = libuv.uv_fs_open( - this.loop, - &this.req, - path, - open_flags_, - JSC.Node.default_permission, - &WrappedCallback.callback, - ); - if (rc.errEnum()) |errno| { - this.errno = bun.errnoToZigErr(errno); - this.system_error = bun.sys.Error.fromCode(errno, .open).withPath(path_string.slice()).toSystemError(); - this.opened_fd = invalid_fd; - Callback(this, invalid_fd); - } - this.req.data = @ptrCast(this); - return; - } - - while (true) { - this.opened_fd = switch (bun.sys.open(path, open_flags_, JSC.Node.default_permission)) { - .result => |fd| fd, - .err => |err| { - if (comptime @hasField(This, "mkdirp_if_not_exists")) { - if (err.errno == @intFromEnum(bun.C.E.NOENT)) { - switch (mkdirIfNotExists(this, err, path, path_string.slice())) { - .@"continue" => continue, - .fail => { - this.opened_fd = invalid_fd; - break; - }, - .no => {}, - } - } - } - - this.errno = bun.errnoToZigErr(err.errno); - this.system_error = err.withPath(path_string.slice()).toSystemError(); - this.opened_fd = invalid_fd; - break; - }, - }; - break; - } - - Callback(this, this.opened_fd); - } - - const OpenCallback = *const fn (*This, bun.FileDescriptor) void; - - pub fn getFd(this: *This, comptime Callback: OpenCallback) void { - if (this.opened_fd != invalid_fd) { - Callback(this, this.opened_fd); - return; - } - - if (@hasField(This, "file_store")) { - const pathlike = this.file_store.pathlike; - if (pathlike == .fd) { - this.opened_fd = pathlike.fd; - Callback(this, this.opened_fd); - return; - } - } else { - const pathlike = this.file_blob.store.?.data.file.pathlike; - if (pathlike == .fd) { - this.opened_fd = pathlike.fd; - Callback(this, this.opened_fd); - return; - } - } - - getFdByOpening(this, Callback); - } - }; - } - - pub fn FileCloser(comptime This: type) type { - return struct { - fn scheduleClose(request: *io.Request) io.Action { - var this: *This = @alignCast(@fieldParentPtr("io_request", request)); - return io.Action{ - .close = .{ - .ctx = this, - .fd = this.opened_fd, - .onDone = @ptrCast(&onIORequestClosed), - .poll = &this.io_poll, - .tag = This.io_tag, - }, - }; - } - - fn onIORequestClosed(this: *This) void { - this.io_poll.flags.remove(.was_ever_registered); - this.task = .{ .callback = &onCloseIORequest }; - bun.JSC.WorkPool.schedule(&this.task); - } - - fn onCloseIORequest(task: *JSC.WorkPoolTask) void { - bloblog("onCloseIORequest()", .{}); - var this: *This = @alignCast(@fieldParentPtr("task", task)); - this.close_after_io = false; - this.update(); - } - - pub fn doClose(this: *This, is_allowed_to_close_fd: bool) bool { - if (@hasField(This, "io_request")) { - if (this.close_after_io) { - this.state.store(ClosingState.closing, .seq_cst); - - @atomicStore(@TypeOf(this.io_request.callback), &this.io_request.callback, &scheduleClose, .seq_cst); - if (!this.io_request.scheduled) - io.Loop.get().schedule(&this.io_request); - return true; - } - } - - if (is_allowed_to_close_fd and - this.opened_fd != invalid_fd and - this.opened_fd.stdioTag() == null) - { - if (comptime Environment.isWindows) { - bun.Async.Closer.close(this.opened_fd, this.loop); - } else { - _ = this.opened_fd.closeAllowingBadFileDescriptor(null); - } - this.opened_fd = invalid_fd; - } - - return false; - } - }; - } - - pub const IOWhich = enum { - source, - destination, - both, - }; - - pub const CopyFileWindows = struct { - destination_file_store: *Store, - source_file_store: *Store, - - io_request: libuv.fs_t = 
std.mem.zeroes(libuv.fs_t), - promise: JSC.JSPromise.Strong = .{}, - mkdirp_if_not_exists: bool = false, - event_loop: *JSC.EventLoop, - - size: Blob.SizeType = Blob.max_size, - - /// For mkdirp - err: ?bun.sys.Error = null, - - /// When we are unable to get the original file path, we do a read-write loop that uses libuv. - read_write_loop: ReadWriteLoop = .{}, - - pub const ReadWriteLoop = struct { - source_fd: bun.FileDescriptor = invalid_fd, - must_close_source_fd: bool = false, - destination_fd: bun.FileDescriptor = invalid_fd, - must_close_destination_fd: bool = false, - written: usize = 0, - read_buf: std.ArrayList(u8) = std.ArrayList(u8).init(default_allocator), - uv_buf: libuv.uv_buf_t = .{ .base = undefined, .len = 0 }, - - pub fn start(read_write_loop: *ReadWriteLoop, this: *CopyFileWindows) JSC.Maybe(void) { - read_write_loop.read_buf.ensureTotalCapacityPrecise(64 * 1024) catch bun.outOfMemory(); - - return read(read_write_loop, this); - } - - pub fn read(read_write_loop: *ReadWriteLoop, this: *CopyFileWindows) JSC.Maybe(void) { - read_write_loop.read_buf.items.len = 0; - read_write_loop.uv_buf = libuv.uv_buf_t.init(read_write_loop.read_buf.allocatedSlice()); - const loop = this.event_loop.virtual_machine.event_loop_handle.?; - - // This io_request is used for both reading and writing. - // For now, we don't start reading the next chunk until - // we've finished writing all the previous chunks. - this.io_request.data = @ptrCast(this); - - const rc = libuv.uv_fs_read( - loop, - &this.io_request, - read_write_loop.source_fd.uv(), - @ptrCast(&read_write_loop.uv_buf), - 1, - -1, - &onRead, - ); - - if (rc.toError(.read)) |err| { - return .{ .err = err }; - } - - return .{ .result = {} }; - } - - fn onRead(req: *libuv.fs_t) callconv(.C) void { - var this: *CopyFileWindows = @fieldParentPtr("io_request", req); - bun.assert(req.data == @as(?*anyopaque, @ptrCast(this))); - - const source_fd = this.read_write_loop.source_fd; - const destination_fd = this.read_write_loop.destination_fd; - const read_buf = &this.read_write_loop.read_buf.items; - - const event_loop = this.event_loop; - - const rc = req.result; - - bun.sys.syslog("uv_fs_read({}, {d}) = {d}", .{ source_fd, read_buf.len, rc.int() }); - if (rc.toError(.read)) |err| { - this.err = err; - this.onReadWriteLoopComplete(); - return; - } - - read_buf.len = @intCast(rc.int()); - this.read_write_loop.uv_buf = libuv.uv_buf_t.init(read_buf.*); - - if (rc.int() == 0) { - // Handle EOF. We can't read any more. - this.onReadWriteLoopComplete(); - return; - } - - // Re-use the fs request. 
- req.deinit(); - const rc2 = libuv.uv_fs_write( - event_loop.virtual_machine.event_loop_handle.?, - &this.io_request, - destination_fd.uv(), - @ptrCast(&this.read_write_loop.uv_buf), - 1, - -1, - &onWrite, - ); - req.data = @ptrCast(this); - - if (rc2.toError(.write)) |err| { - this.err = err; - this.onReadWriteLoopComplete(); - return; - } - } - - fn onWrite(req: *libuv.fs_t) callconv(.C) void { - var this: *CopyFileWindows = @fieldParentPtr("io_request", req); - bun.assert(req.data == @as(?*anyopaque, @ptrCast(this))); - const buf = &this.read_write_loop.read_buf.items; - - const destination_fd = this.read_write_loop.destination_fd; - - const rc = req.result; - - bun.sys.syslog("uv_fs_write({}, {d}) = {d}", .{ destination_fd, buf.len, rc.int() }); - - if (rc.toError(.write)) |err| { - this.err = err; - this.onReadWriteLoopComplete(); - return; - } - - const wrote: u32 = @intCast(rc.int()); - - this.read_write_loop.written += wrote; - - if (wrote < buf.len) { - if (wrote == 0) { - // Handle EOF. We can't write any more. - this.onReadWriteLoopComplete(); - return; - } - - // Re-use the fs request. - req.deinit(); - req.data = @ptrCast(this); - - this.read_write_loop.uv_buf = libuv.uv_buf_t.init(this.read_write_loop.uv_buf.slice()[wrote..]); - const rc2 = libuv.uv_fs_write( - this.event_loop.virtual_machine.event_loop_handle.?, - &this.io_request, - destination_fd.uv(), - @ptrCast(&this.read_write_loop.uv_buf), - 1, - -1, - &onWrite, - ); - - if (rc2.toError(.write)) |err| { - this.err = err; - this.onReadWriteLoopComplete(); - return; - } - - return; - } - - req.deinit(); - switch (this.read_write_loop.read(this)) { - .err => |err| { - this.err = err; - this.onReadWriteLoopComplete(); - }, - .result => {}, - } - } - - pub fn close(this: *ReadWriteLoop) void { - if (this.must_close_source_fd) { - if (this.source_fd.makeLibUVOwned()) |fd| { - bun.Async.Closer.close( - fd, - bun.Async.Loop.get(), - ); - } else |_| { - this.source_fd.close(); - } - this.must_close_source_fd = false; - this.source_fd = invalid_fd; - } - - if (this.must_close_destination_fd) { - if (this.destination_fd.makeLibUVOwned()) |fd| { - bun.Async.Closer.close( - fd, - bun.Async.Loop.get(), - ); - } else |_| { - this.destination_fd.close(); - } - this.must_close_destination_fd = false; - this.destination_fd = invalid_fd; - } - - this.read_buf.clearAndFree(); - } - }; - - pub fn onReadWriteLoopComplete(this: *CopyFileWindows) void { - this.event_loop.unrefConcurrently(); - - if (this.err) |err| { - this.err = null; - this.throw(err); - return; - } - - this.onComplete(this.read_write_loop.written); - } - - pub const new = bun.TrivialNew(@This()); - - pub fn init( - destination_file_store: *Store, - source_file_store: *Store, - event_loop: *JSC.EventLoop, - mkdirp_if_not_exists: bool, - size_: Blob.SizeType, - ) JSC.JSValue { - destination_file_store.ref(); - source_file_store.ref(); - const result = CopyFileWindows.new(.{ - .destination_file_store = destination_file_store, - .source_file_store = source_file_store, - .promise = JSC.JSPromise.Strong.init(event_loop.global), - .io_request = std.mem.zeroes(libuv.fs_t), - .event_loop = event_loop, - .mkdirp_if_not_exists = mkdirp_if_not_exists, - .size = size_, - }); - const promise = result.promise.value(); - - // On error, this function might free the CopyFileWindows struct. - // So we can no longer reference it beyond this point. 
- result.copyfile(); - - return promise; - } - - fn preparePathlike(pathlike: *JSC.Node.PathOrFileDescriptor, must_close: *bool, is_reading: bool) JSC.Maybe(bun.FileDescriptor) { - if (pathlike.* == .path) { - const fd = switch (bun.sys.openatWindowsT( - u8, - bun.invalid_fd, - pathlike.path.slice(), - if (is_reading) - bun.O.RDONLY - else - bun.O.WRONLY | bun.O.CREAT, - 0, - )) { - .result => |result| result.makeLibUVOwned() catch { - result.close(); - return .{ - .err = .{ - .errno = @as(c_int, @intCast(@intFromEnum(bun.C.SystemErrno.EMFILE))), - .syscall = .open, - .path = pathlike.path.slice(), - }, - }; - }, - .err => |err| { - return .{ - .err = err, - }; - }, - }; - must_close.* = true; - return .{ .result = fd }; - } else { - // We assume that this is already a uv-casted file descriptor. - return .{ .result = pathlike.fd }; - } - } - - fn prepareReadWriteLoop(this: *CopyFileWindows) void { - // Open the destination first, so that if we need to call - // mkdirp(), we don't spend extra time opening the file handle for - // the source. - this.read_write_loop.destination_fd = switch (preparePathlike(&this.destination_file_store.data.file.pathlike, &this.read_write_loop.must_close_destination_fd, false)) { - .result => |fd| fd, - .err => |err| { - if (this.mkdirp_if_not_exists and err.getErrno() == .NOENT) { - this.mkdirp(); - return; - } - - this.throw(err); - return; - }, - }; - - this.read_write_loop.source_fd = switch (preparePathlike(&this.source_file_store.data.file.pathlike, &this.read_write_loop.must_close_source_fd, true)) { - .result => |fd| fd, - .err => |err| { - this.throw(err); - return; - }, - }; - - switch (this.read_write_loop.start(this)) { - .err => |err| { - this.throw(err); - return; - }, - .result => { - this.event_loop.refConcurrently(); - }, - } - } - - fn copyfile(this: *CopyFileWindows) void { - // This is for making it easier for us to test this code path - if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_UV_FS_COPYFILE")) { - this.prepareReadWriteLoop(); - return; - } - - var pathbuf1: bun.PathBuffer = undefined; - var pathbuf2: bun.PathBuffer = undefined; - var destination_file_store = &this.destination_file_store.data.file; - var source_file_store = &this.source_file_store.data.file; - - const new_path: [:0]const u8 = brk: { - switch (destination_file_store.pathlike) { - .path => { - break :brk destination_file_store.pathlike.path.sliceZ(&pathbuf1); - }, - .fd => |fd| { - switch (bun.sys.File.from(fd).kind()) { - .err => |err| { - this.throw(err); - return; - }, - .result => |kind| { - switch (kind) { - .directory => { - this.throw(bun.sys.Error.fromCode(.ISDIR, .open)); - return; - }, - .character_device => { - this.prepareReadWriteLoop(); - return; - }, - else => { - const out = bun.getFdPath(fd, &pathbuf1) catch { - // This case can happen when either: - // - NUL device - // - Pipe. 
`cat foo.txt | bun bar.ts` - this.prepareReadWriteLoop(); - return; - }; - pathbuf1[out.len] = 0; - break :brk pathbuf1[0..out.len :0]; - }, - } - }, - } - }, - } - }; - const old_path: [:0]const u8 = brk: { - switch (source_file_store.pathlike) { - .path => { - break :brk source_file_store.pathlike.path.sliceZ(&pathbuf2); - }, - .fd => |fd| { - switch (bun.sys.File.from(fd).kind()) { - .err => |err| { - this.throw(err); - return; - }, - .result => |kind| { - switch (kind) { - .directory => { - this.throw(bun.sys.Error.fromCode(.ISDIR, .open)); - return; - }, - .character_device => { - this.prepareReadWriteLoop(); - return; - }, - else => { - const out = bun.getFdPath(fd, &pathbuf2) catch { - // This case can happen when either: - // - NUL device - // - Pipe. `cat foo.txt | bun bar.ts` - this.prepareReadWriteLoop(); - return; - }; - pathbuf2[out.len] = 0; - break :brk pathbuf2[0..out.len :0]; - }, - } - }, - } - }, - } - }; - const loop = this.event_loop.virtual_machine.event_loop_handle.?; - this.io_request.data = @ptrCast(this); - - const rc = libuv.uv_fs_copyfile( - loop, - &this.io_request, - old_path, - new_path, - 0, - &onCopyFile, - ); - - if (rc.errno()) |errno| { - this.throw(.{ - // #6336 - .errno = if (errno == @intFromEnum(bun.C.SystemErrno.EPERM)) - @as(c_int, @intCast(@intFromEnum(bun.C.SystemErrno.ENOENT))) - else - errno, - .syscall = .copyfile, - .path = old_path, - }); - return; - } - this.event_loop.refConcurrently(); - } - - pub fn throw(this: *CopyFileWindows, err: bun.sys.Error) void { - const globalThis = this.event_loop.global; - const promise = this.promise.swap(); - const err_instance = err.toJSC(globalThis); - var event_loop = this.event_loop; - event_loop.enter(); - defer event_loop.exit(); - this.deinit(); - promise.reject(globalThis, err_instance); - } - - fn onCopyFile(req: *libuv.fs_t) callconv(.C) void { - var this: *CopyFileWindows = @fieldParentPtr("io_request", req); - bun.assert(req.data == @as(?*anyopaque, @ptrCast(this))); - - var event_loop = this.event_loop; - event_loop.unrefConcurrently(); - const rc = req.result; - - bun.sys.syslog("uv_fs_copyfile() = {}", .{rc}); - if (rc.errEnum()) |errno| { - if (this.mkdirp_if_not_exists and errno == .NOENT) { - req.deinit(); - this.mkdirp(); - return; - } else { - var err = bun.sys.Error.fromCode( - // #6336 - if (errno == .PERM) .NOENT else errno, - - .copyfile, - ); - const destination = &this.destination_file_store.data.file; - - // we don't really know which one it is - if (destination.pathlike == .path) { - err = err.withPath(destination.pathlike.path.slice()); - } else if (destination.pathlike == .fd) { - err = err.withFd(destination.pathlike.fd); - } - - this.throw(err); - } - return; - } - - this.onComplete(req.statbuf.size); - } - - pub fn onComplete(this: *CopyFileWindows, written_actual: usize) void { - var written = written_actual; - if (written != @as(@TypeOf(written), @intCast(this.size)) and this.size != Blob.max_size) { - this.truncate(); - written = @intCast(this.size); - } - const globalThis = this.event_loop.global; - const promise = this.promise.swap(); - var event_loop = this.event_loop; - event_loop.enter(); - defer event_loop.exit(); - - this.deinit(); - promise.resolve(globalThis, JSC.JSValue.jsNumberFromUint64(written)); - } - - fn truncate(this: *CopyFileWindows) void { - // TODO: optimize this - @branchHint(.cold); - - var node_fs: JSC.Node.NodeFS = .{}; - _ = node_fs.truncate( - .{ - .path = this.destination_file_store.data.file.pathlike, - .len = @intCast(this.size), - }, - 
.sync, - ); - } - - pub fn deinit(this: *CopyFileWindows) void { - this.read_write_loop.close(); - this.destination_file_store.deref(); - this.source_file_store.deref(); - this.promise.deinit(); - this.io_request.deinit(); - bun.destroy(this); - } - - fn mkdirp( - this: *CopyFileWindows, - ) void { - bun.sys.syslog("mkdirp", .{}); - this.mkdirp_if_not_exists = false; - var destination = &this.destination_file_store.data.file; - if (destination.pathlike != .path) { - this.throw(.{ - .errno = @as(c_int, @intCast(@intFromEnum(bun.C.SystemErrno.EINVAL))), - .syscall = .mkdir, - }); - return; - } - - this.event_loop.refConcurrently(); - JSC.Node.Async.AsyncMkdirp.new(.{ - .completion = @ptrCast(&onMkdirpCompleteConcurrent), - .completion_ctx = this, - .path = bun.Dirname.dirname(u8, destination.pathlike.path.slice()) - // this shouldn't happen - orelse destination.pathlike.path.slice(), - }).schedule(); - } - - fn onMkdirpComplete(this: *CopyFileWindows) void { - this.event_loop.unrefConcurrently(); - - if (this.err) |err| { - this.throw(err); - bun.default_allocator.free(err.path); - return; - } - - this.copyfile(); - } - - fn onMkdirpCompleteConcurrent(this: *CopyFileWindows, err_: JSC.Maybe(void)) void { - bun.sys.syslog("mkdirp complete", .{}); - assert(this.err == null); - this.err = if (err_ == .err) err_.err else null; - this.event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.create(JSC.ManagedTask.New(CopyFileWindows, onMkdirpComplete).init(this))); - } - }; - - const unsupported_directory_error = SystemError{ - .errno = @as(c_int, @intCast(@intFromEnum(bun.C.SystemErrno.EISDIR))), - .message = bun.String.static("That doesn't work on folders"), - .syscall = bun.String.static("fstat"), - }; - const unsupported_non_regular_file_error = SystemError{ - .errno = @as(c_int, @intCast(@intFromEnum(bun.C.SystemErrno.ENOTSUP))), - .message = bun.String.static("Non-regular files aren't supported yet"), - .syscall = bun.String.static("fstat"), - }; - - pub const CopyFilePromiseTask = JSC.ConcurrentPromiseTask(CopyFile); - pub const CopyFilePromiseTaskEventLoopTask = CopyFilePromiseTask.EventLoopTask; - - // blocking, but off the main thread - pub const CopyFile = struct { - destination_file_store: FileStore, - source_file_store: FileStore, - store: ?*Store = null, - source_store: ?*Store = null, - offset: SizeType = 0, - size: SizeType = 0, - max_length: SizeType = Blob.max_size, - destination_fd: bun.FileDescriptor = invalid_fd, - source_fd: bun.FileDescriptor = invalid_fd, - - system_error: ?SystemError = null, - - read_len: SizeType = 0, - read_off: SizeType = 0, - - globalThis: *JSGlobalObject, - - mkdirp_if_not_exists: bool = false, - - pub const ResultType = anyerror!SizeType; - - pub const Callback = *const fn (ctx: *anyopaque, len: ResultType) void; - - pub fn create( - allocator: std.mem.Allocator, - store: *Store, - source_store: *Store, - off: SizeType, - max_len: SizeType, - globalThis: *JSC.JSGlobalObject, - mkdirp_if_not_exists: bool, - ) !*CopyFilePromiseTask { - const read_file = bun.new(CopyFile, CopyFile{ - .store = store, - .source_store = source_store, - .offset = off, - .max_length = max_len, - .globalThis = globalThis, - .destination_file_store = store.data.file, - .source_file_store = source_store.data.file, - .mkdirp_if_not_exists = mkdirp_if_not_exists, - }); - store.ref(); - source_store.ref(); - return CopyFilePromiseTask.createOnJSThread(allocator, globalThis, read_file) catch bun.outOfMemory(); - } - - const linux = std.os.linux; - const darwin = std.posix.system; - - 
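`CopyFile` is the blocking (but off-main-thread) POSIX file-to-file copier reached by a plain file-to-file `Bun.write`. The two `SystemError` constants just above describe how non-regular sources are rejected; a hedged sketch of what that looks like from JavaScript (the exact error shape is an assumption):

    try {
      // Using a directory as the copy source is rejected with EISDIR
      // ("That doesn't work on folders").
      await Bun.write("out.txt", Bun.file("/tmp"));
    } catch (err: any) {
      console.log(err.code); // likely "EISDIR"; the exact property is a guess
    }
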
pub fn deinit(this: *CopyFile) void { - if (this.source_file_store.pathlike == .path) { - if (this.source_file_store.pathlike.path == .string and this.system_error == null) { - bun.default_allocator.free(@constCast(this.source_file_store.pathlike.path.slice())); - } - } - this.store.?.deref(); - - bun.destroy(this); - } - - pub fn reject(this: *CopyFile, promise: *JSC.JSPromise) void { - const globalThis = this.globalThis; - var system_error: SystemError = this.system_error orelse SystemError{}; - if (this.source_file_store.pathlike == .path and system_error.path.isEmpty()) { - system_error.path = bun.String.createUTF8(this.source_file_store.pathlike.path.slice()); - } - - if (system_error.message.isEmpty()) { - system_error.message = bun.String.static("Failed to copy file"); - } - - const instance = system_error.toErrorInstance(this.globalThis); - if (this.store) |store| { - store.deref(); - } - promise.reject(globalThis, instance); - } - - pub fn then(this: *CopyFile, promise: *JSC.JSPromise) void { - this.source_store.?.deref(); - - if (this.system_error != null) { - this.reject(promise); - return; - } - - promise.resolve(this.globalThis, JSC.JSValue.jsNumberFromUint64(this.read_len)); - } - - pub fn run(this: *CopyFile) void { - this.runAsync(); - } - - pub fn doClose(this: *CopyFile) void { - const close_input = this.destination_file_store.pathlike != .fd and this.destination_fd != invalid_fd; - const close_output = this.source_file_store.pathlike != .fd and this.source_fd != invalid_fd; - - if (close_input and close_output) { - this.doCloseFile(.both); - } else if (close_input) { - this.doCloseFile(.destination); - } else if (close_output) { - this.doCloseFile(.source); - } - } - - const posix = std.posix; - - pub fn doCloseFile(this: *CopyFile, comptime which: IOWhich) void { - switch (which) { - .both => { - this.destination_fd.close(); - this.source_fd.close(); - }, - .destination => { - this.destination_fd.close(); - }, - .source => { - this.source_fd.close(); - }, - } - } - - const O = bun.O; - const open_destination_flags = O.CLOEXEC | O.CREAT | O.WRONLY | O.TRUNC; - const open_source_flags = O.CLOEXEC | O.RDONLY; - - pub fn doOpenFile(this: *CopyFile, comptime which: IOWhich) !void { - var path_buf1: bun.PathBuffer = undefined; - // open source file first - // if it fails, we don't want the extra destination file hanging out - if (which == .both or which == .source) { - this.source_fd = switch (bun.sys.open( - this.source_file_store.pathlike.path.sliceZ(&path_buf1), - open_source_flags, - 0, - )) { - .result => |result| switch (result.makeLibUVOwnedForSyscall(.open, .close_on_fail)) { - .result => |result_fd| result_fd, - .err => |errno| { - this.system_error = errno.toSystemError(); - return bun.errnoToZigErr(errno.errno); - }, - }, - .err => |errno| { - this.system_error = errno.toSystemError(); - return bun.errnoToZigErr(errno.errno); - }, - }; - } - - if (which == .both or which == .destination) { - while (true) { - const dest = this.destination_file_store.pathlike.path.sliceZ(&path_buf1); - this.destination_fd = switch (bun.sys.open( - dest, - open_destination_flags, - JSC.Node.default_permission, - )) { - .result => |result| switch (result.makeLibUVOwnedForSyscall(.open, .close_on_fail)) { - .result => |result_fd| result_fd, - .err => |errno| { - this.system_error = errno.toSystemError(); - return bun.errnoToZigErr(errno.errno); - }, - }, - .err => |errno| { - switch (mkdirIfNotExists(this, errno, dest, dest)) { - .@"continue" => continue, - .fail => { - if (which == 
.both) { - this.source_fd.close(); - this.source_fd = .invalid; - } - return bun.errnoToZigErr(errno.errno); - }, - .no => {}, - } - - if (which == .both) { - this.source_fd.close(); - this.source_fd = .invalid; - } - - this.system_error = errno.withPath(this.destination_file_store.pathlike.path.slice()).toSystemError(); - return bun.errnoToZigErr(errno.errno); - }, - }; - break; - } - } - } - - const TryWith = enum { - sendfile, - copy_file_range, - splice, - - pub const tag = std.EnumMap(TryWith, bun.sys.Tag).init(.{ - .sendfile = .sendfile, - .copy_file_range = .copy_file_range, - .splice = .splice, - }); - }; - - pub fn doCopyFileRange( - this: *CopyFile, - comptime use: TryWith, - comptime clear_append_if_invalid: bool, - ) anyerror!void { - this.read_off += this.offset; - - var remain = @as(usize, this.max_length); - const unknown_size = remain == max_size or remain == 0; - if (unknown_size) { - // sometimes stat lies - // let's give it 4096 and see how it goes - remain = 4096; - } - - var total_written: usize = 0; - const src_fd = this.source_fd; - const dest_fd = this.destination_fd; - - defer { - this.read_len = @as(SizeType, @truncate(total_written)); - } - - var has_unset_append = false; - - // If they can't use copy_file_range, they probably also can't - // use sendfile() or splice() - if (!bun.canUseCopyFileRangeSyscall()) { - switch (JSC.Node.NodeFS.copyFileUsingReadWriteLoop("", "", src_fd, dest_fd, if (unknown_size) 0 else remain, &total_written)) { - .err => |err| { - this.system_error = err.toSystemError(); - return bun.errnoToZigErr(err.errno); - }, - .result => { - _ = linux.ftruncate(dest_fd.cast(), @as(std.posix.off_t, @intCast(total_written))); - return; - }, - } - } - - while (true) { - // TODO: this should use non-blocking I/O. - const written = switch (comptime use) { - .copy_file_range => linux.copy_file_range(src_fd.cast(), null, dest_fd.cast(), null, remain, 0), - .sendfile => linux.sendfile(dest_fd.cast(), src_fd.cast(), null, remain), - .splice => bun.C.splice(src_fd.cast(), null, dest_fd.cast(), null, remain, 0), - }; - - switch (bun.C.getErrno(written)) { - .SUCCESS => {}, - - .NOSYS, .XDEV => { - // TODO: this should use non-blocking I/O. - switch (JSC.Node.NodeFS.copyFileUsingReadWriteLoop("", "", src_fd, dest_fd, if (unknown_size) 0 else remain, &total_written)) { - .err => |err| { - this.system_error = err.toSystemError(); - return bun.errnoToZigErr(err.errno); - }, - .result => { - _ = linux.ftruncate(dest_fd.cast(), @as(std.posix.off_t, @intCast(total_written))); - return; - }, - } - }, - - .INVAL => { - if (comptime clear_append_if_invalid) { - if (!has_unset_append) { - // https://kylelaker.com/2018/08/31/stdout-oappend.html - // make() can set STDOUT / STDERR to O_APPEND - // this messes up sendfile() - has_unset_append = true; - const flags = linux.fcntl(dest_fd.cast(), linux.F.GETFL, @as(c_int, 0)); - if ((flags & O.APPEND) != 0) { - _ = linux.fcntl(dest_fd.cast(), linux.F.SETFL, flags ^ O.APPEND); - continue; - } - } - } - - // If the Linux machine doesn't support - // copy_file_range or the file descriptor is - // incompatible with the chosen syscall, fall back - // to a read/write loop - if (total_written == 0) { - // TODO: this should use non-blocking I/O. 
- switch (JSC.Node.NodeFS.copyFileUsingReadWriteLoop("", "", src_fd, dest_fd, if (unknown_size) 0 else remain, &total_written)) { - .err => |err| { - this.system_error = err.toSystemError(); - return bun.errnoToZigErr(err.errno); - }, - .result => { - _ = linux.ftruncate(dest_fd.cast(), @as(std.posix.off_t, @intCast(total_written))); - return; - }, - } - } - - this.system_error = (bun.sys.Error{ - .errno = @as(bun.sys.Error.Int, @intCast(@intFromEnum(linux.E.INVAL))), - .syscall = TryWith.tag.get(use).?, - }).toSystemError(); - return bun.errnoToZigErr(linux.E.INVAL); - }, - else => |errno| { - this.system_error = (bun.sys.Error{ - .errno = @as(bun.sys.Error.Int, @intCast(@intFromEnum(errno))), - .syscall = TryWith.tag.get(use).?, - }).toSystemError(); - return bun.errnoToZigErr(errno); - }, - } - - // wrote zero bytes means EOF - remain -|= @intCast(written); - total_written += @intCast(written); - if (written == 0 or remain == 0) break; - } - } - - pub fn doFCopyFileWithReadWriteLoopFallback(this: *CopyFile) anyerror!void { - switch (bun.sys.fcopyfile(this.source_fd, this.destination_fd, posix.system.COPYFILE{ .DATA = true })) { - .err => |errno| { - switch (errno.getErrno()) { - // If the file type doesn't support seeking, it may return EBADF - // Example case: - // - // bun test bun-write.test | xargs echo - // - .BADF => { - var total_written: u64 = 0; - - // TODO: this should use non-blocking I/O. - switch (JSC.Node.NodeFS.copyFileUsingReadWriteLoop("", "", this.source_fd, this.destination_fd, 0, &total_written)) { - .err => |err| { - this.system_error = err.toSystemError(); - return bun.errnoToZigErr(err.errno); - }, - .result => {}, - } - }, - else => { - this.system_error = errno.toSystemError(); - - return bun.errnoToZigErr(errno.errno); - }, - } - }, - .result => {}, - } - } - - pub fn doClonefile(this: *CopyFile) anyerror!void { - var source_buf: bun.PathBuffer = undefined; - var dest_buf: bun.PathBuffer = undefined; - - while (true) { - const dest = this.destination_file_store.pathlike.path.sliceZ( - &dest_buf, - ); - switch (bun.sys.clonefile( - this.source_file_store.pathlike.path.sliceZ(&source_buf), - dest, - )) { - .err => |errno| { - switch (mkdirIfNotExists(this, errno, dest, this.destination_file_store.pathlike.path.slice())) { - .@"continue" => continue, - .fail => {}, - .no => {}, - } - this.system_error = errno.toSystemError(); - return bun.errnoToZigErr(errno.errno); - }, - .result => {}, - } - break; - } - } - - pub fn runAsync(this: *CopyFile) void { - if (Environment.isWindows) return; //why - // defer task.onFinish(); - - var stat_: ?bun.Stat = null; - - if (this.destination_file_store.pathlike == .fd) { - this.destination_fd = this.destination_file_store.pathlike.fd; - } - - if (this.source_file_store.pathlike == .fd) { - this.source_fd = this.source_file_store.pathlike.fd; - } - - // Do we need to open both files? - if (this.destination_fd == invalid_fd and this.source_fd == invalid_fd) { - - // First, we attempt to clonefile() on macOS - // This is the fastest way to copy a file. - if (comptime Environment.isMac) { - if (this.offset == 0 and this.source_file_store.pathlike == .path and this.destination_file_store.pathlike == .path) { - do_clonefile: { - var path_buf: bun.PathBuffer = undefined; - - // stat the output file, make sure it: - // 1. 
Exists - switch (bun.sys.stat(this.source_file_store.pathlike.path.sliceZ(&path_buf))) { - .result => |result| { - stat_ = result; - - if (posix.S.ISDIR(result.mode)) { - this.system_error = unsupported_directory_error; - return; - } - - if (!posix.S.ISREG(result.mode)) - break :do_clonefile; - }, - .err => |err| { - // If we can't stat it, we also can't copy it. - this.system_error = err.toSystemError(); - return; - }, - } - - if (this.doClonefile()) { - if (this.max_length != Blob.max_size and this.max_length < @as(SizeType, @intCast(stat_.?.size))) { - // If this fails...well, there's not much we can do about it. - _ = bun.C.truncate( - this.destination_file_store.pathlike.path.sliceZ(&path_buf), - @as(std.posix.off_t, @intCast(this.max_length)), - ); - this.read_len = @as(SizeType, @intCast(this.max_length)); - } else { - this.read_len = @as(SizeType, @intCast(stat_.?.size)); - } - return; - } else |_| { - - // this may still fail, in which case we just continue trying with fcopyfile - // it can fail when the input file already exists - // or if the output is not a directory - // or if it's a network volume - this.system_error = null; - } - } - } - } - - this.doOpenFile(.both) catch return; - // Do we need to open only one file? - } else if (this.destination_fd == invalid_fd) { - this.source_fd = this.source_file_store.pathlike.fd; - - this.doOpenFile(.destination) catch return; - // Do we need to open only one file? - } else if (this.source_fd == invalid_fd) { - this.destination_fd = this.destination_file_store.pathlike.fd; - - this.doOpenFile(.source) catch return; - } - - if (this.system_error != null) { - return; - } - - assert(this.destination_fd != invalid_fd); - assert(this.source_fd != invalid_fd); - - if (this.destination_file_store.pathlike == .fd) {} - - const stat: bun.Stat = stat_ orelse switch (bun.sys.fstat(this.source_fd)) { - .result => |result| result, - .err => |err| { - this.doClose(); - this.system_error = err.toSystemError(); - return; - }, - }; - - if (posix.S.ISDIR(stat.mode)) { - this.system_error = unsupported_directory_error; - this.doClose(); - return; - } - - if (stat.size != 0) { - this.max_length = @max(@min(@as(SizeType, @intCast(stat.size)), this.max_length), this.offset) - this.offset; - if (this.max_length == 0) { - this.doClose(); - return; - } - - if (posix.S.ISREG(stat.mode) and - this.max_length > bun.C.preallocate_length and - this.max_length != Blob.max_size) - { - bun.C.preallocate_file(this.destination_fd.cast(), 0, this.max_length) catch {}; - } - } - - if (comptime Environment.isLinux) { - - // Bun.write(Bun.file("a"), Bun.file("b")) - if (posix.S.ISREG(stat.mode) and (posix.S.ISREG(this.destination_file_store.mode) or this.destination_file_store.mode == 0)) { - if (this.destination_file_store.is_atty orelse false) { - this.doCopyFileRange(.copy_file_range, true) catch {}; - } else { - this.doCopyFileRange(.copy_file_range, false) catch {}; - } - - this.doClose(); - return; - } - - // $ bun run foo.js | bun run bar.js - if (posix.S.ISFIFO(stat.mode) and posix.S.ISFIFO(this.destination_file_store.mode)) { - if (this.destination_file_store.is_atty orelse false) { - this.doCopyFileRange(.splice, true) catch {}; - } else { - this.doCopyFileRange(.splice, false) catch {}; - } - - this.doClose(); - return; - } - - if (posix.S.ISREG(stat.mode) or posix.S.ISCHR(stat.mode) or posix.S.ISSOCK(stat.mode)) { - if (this.destination_file_store.is_atty orelse false) { - this.doCopyFileRange(.sendfile, true) catch {}; - } else { - 
this.doCopyFileRange(.sendfile, false) catch {}; - } - - this.doClose(); - return; - } - - this.system_error = unsupported_non_regular_file_error; - this.doClose(); - return; - } - - if (comptime Environment.isMac) { - this.doFCopyFileWithReadWriteLoopFallback() catch { - this.doClose(); - - return; - }; - if (stat.size != 0 and @as(SizeType, @intCast(stat.size)) > this.max_length) { - _ = darwin.ftruncate(this.destination_fd.cast(), @as(std.posix.off_t, @intCast(this.max_length))); - } - - this.doClose(); - } else { - @compileError("TODO: implement copyfile"); - } - } - }; - }; - - pub const FileStore = struct { - pathlike: JSC.Node.PathOrFileDescriptor, - mime_type: http.MimeType = http.MimeType.other, - is_atty: ?bool = null, - mode: bun.Mode = 0, - seekable: ?bool = null, - max_size: SizeType = Blob.max_size, - // milliseconds since ECMAScript epoch - last_modified: JSC.JSTimeType = JSC.init_timestamp, - - pub fn unlink(this: *const FileStore, globalThis: *JSC.JSGlobalObject) bun.JSError!JSValue { - return switch (this.pathlike) { - .path => |path_like| JSC.Node.Async.unlink.create(globalThis, undefined, .{ - .path = .{ - .encoded_slice = switch (path_like) { - .encoded_slice => |slice| try slice.toOwned(bun.default_allocator), - else => try ZigString.init(path_like.slice()).toSliceClone(bun.default_allocator), - }, - }, - }, globalThis.bunVM()), - .fd => JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Is not possible to unlink a file descriptor", .{})), - }; - } - pub fn isSeekable(this: *const FileStore) ?bool { - if (this.seekable) |seekable| { - return seekable; - } - - if (this.mode != 0) { - return bun.isRegularFile(this.mode); - } - - return null; - } - - pub fn init(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?http.MimeType) FileStore { - return .{ .pathlike = pathlike, .mime_type = mime_type orelse http.MimeType.other }; - } - }; - - pub const S3Store = struct { - pathlike: JSC.Node.PathLike, - mime_type: http.MimeType = http.MimeType.other, - credentials: ?*S3Credentials, - options: bun.S3.MultiPartUploadOptions = .{}, - acl: ?S3.ACL = null, - storage_class: ?S3.StorageClass = null, - - pub fn isSeekable(_: *const @This()) ?bool { - return true; - } - - pub fn getCredentials(this: *const @This()) *S3Credentials { - bun.assert(this.credentials != null); - return this.credentials.?; - } - - pub fn getCredentialsWithOptions(this: *const @This(), options: ?JSValue, globalObject: *JSC.JSGlobalObject) bun.JSError!S3.S3CredentialsWithOptions { - return S3Credentials.getCredentialsWithOptions(this.getCredentials().*, this.options, options, this.acl, this.storage_class, globalObject); - } - - pub fn path(this: *@This()) []const u8 { - var path_name = bun.URL.parse(this.pathlike.slice()).s3Path(); - // normalize start and ending - if (strings.endsWith(path_name, "/")) { - path_name = path_name[0..path_name.len]; - } else if (strings.endsWith(path_name, "\\")) { - path_name = path_name[0 .. 
path_name.len - 1]; - } - if (strings.startsWith(path_name, "/")) { - path_name = path_name[1..]; - } else if (strings.startsWith(path_name, "\\")) { - path_name = path_name[1..]; - } - return path_name; - } - - pub fn unlink(this: *@This(), store: *Store, globalThis: *JSC.JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue { - const Wrapper = struct { - promise: JSC.JSPromise.Strong, - store: *Store, - global: *JSC.JSGlobalObject, - - pub const new = bun.TrivialNew(@This()); - - pub fn resolve(result: S3.S3DeleteResult, opaque_self: *anyopaque) void { - const self: *@This() = @ptrCast(@alignCast(opaque_self)); - defer self.deinit(); - const globalObject = self.global; - switch (result) { - .success => { - self.promise.resolve(globalObject, .true); - }, - .not_found, .failure => |err| { - self.promise.reject(globalObject, err.toJS(globalObject, self.store.getPath())); - }, - } - } - - fn deinit(wrap: *@This()) void { - wrap.store.deref(); - wrap.promise.deinit(); - bun.destroy(wrap); - } - }; - const promise = JSC.JSPromise.Strong.init(globalThis); - const value = promise.value(); - const proxy_url = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy = if (proxy_url) |url| url.href else null; - var aws_options = try this.getCredentialsWithOptions(extra_options, globalThis); - defer aws_options.deinit(); - S3.delete(&aws_options.credentials, this.path(), @ptrCast(&Wrapper.resolve), Wrapper.new(.{ - .promise = promise, - .store = store, // store is needed in case of not found error - .global = globalThis, - }), proxy); - store.ref(); - - return value; - } - - pub fn listObjects(this: *@This(), store: *Store, globalThis: *JSC.JSGlobalObject, listOptions: JSValue, extra_options: ?JSValue) bun.JSError!JSValue { - if (!listOptions.isEmptyOrUndefinedOrNull() and !listOptions.isObject()) { - return globalThis.throwInvalidArguments("S3Client.listObjects() needs a S3ListObjectsOption as it's first argument", .{}); - } - - const Wrapper = struct { - promise: JSC.JSPromise.Strong, - store: *Store, - resolvedlistOptions: S3.S3ListObjectsOptions, - global: *JSC.JSGlobalObject, - - pub fn resolve(result: S3.S3ListObjectsResult, opaque_self: *anyopaque) void { - const self: *@This() = @ptrCast(@alignCast(opaque_self)); - defer self.deinit(); - const globalObject = self.global; - - switch (result) { - .success => |list_result| { - defer list_result.deinit(); - self.promise.resolve(globalObject, list_result.toJS(globalObject)); - }, - - inline .not_found, .failure => |err| { - self.promise.reject(globalObject, err.toJS(globalObject, self.store.getPath())); - }, - } - } - - fn deinit(self: *@This()) void { - self.store.deref(); - self.promise.deinit(); - self.resolvedlistOptions.deinit(); - self.destroy(); - } - - pub inline fn destroy(self: *@This()) void { - bun.destroy(self); - } - }; - - const promise = JSC.JSPromise.Strong.init(globalThis); - const value = promise.value(); - const proxy_url = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy = if (proxy_url) |url| url.href else null; - var aws_options = try this.getCredentialsWithOptions(extra_options, globalThis); - defer aws_options.deinit(); - - const options = S3.getListObjectsOptionsFromJS(globalThis, listOptions) catch bun.outOfMemory(); - store.ref(); - - S3.listObjects(&aws_options.credentials, options, @ptrCast(&Wrapper.resolve), bun.new(Wrapper, .{ - .promise = promise, - .store = store, // store is needed in case of not found error - .resolvedlistOptions = options, - .global = 
globalThis, - }), proxy); - - return value; - } - - pub fn initWithReferencedCredentials(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: *S3Credentials) S3Store { - credentials.ref(); - return .{ - .credentials = credentials, - .pathlike = pathlike, - .mime_type = mime_type orelse http.MimeType.other, - }; - } - pub fn init(pathlike: JSC.Node.PathLike, mime_type: ?http.MimeType, credentials: S3Credentials) S3Store { - return .{ - .credentials = credentials.dupe(), - .pathlike = pathlike, - .mime_type = mime_type orelse http.MimeType.other, - }; - } - pub fn estimatedSize(this: *const @This()) usize { - return this.pathlike.estimatedSize() + if (this.credentials) |credentials| credentials.estimatedSize() else 0; - } - - pub fn deinit(this: *@This(), allocator: std.mem.Allocator) void { - if (this.pathlike == .string) { - allocator.free(@constCast(this.pathlike.slice())); - } else { - this.pathlike.deinit(); - } - this.pathlike = .{ - .string = bun.PathString.empty, - }; - if (this.credentials) |credentials| { - credentials.deref(); - this.credentials = null; - } - } - }; - - pub const ByteStore = struct { - ptr: ?[*]u8 = undefined, - len: SizeType = 0, - cap: SizeType = 0, - allocator: std.mem.Allocator, - - /// Used by standalone module graph and the File constructor - stored_name: bun.PathString = bun.PathString.empty, - - /// Takes ownership of `bytes`, which must have been allocated with - /// `allocator`. - pub fn init(bytes: []u8, allocator: std.mem.Allocator) ByteStore { - return .{ - .ptr = bytes.ptr, - .len = @as(SizeType, @truncate(bytes.len)), - .cap = @as(SizeType, @truncate(bytes.len)), - .allocator = allocator, - }; - } - pub fn initEmptyWithName(name: bun.PathString, allocator: std.mem.Allocator) ByteStore { - return .{ - .ptr = null, - .len = 0, - .cap = 0, - .allocator = allocator, - .stored_name = name, - }; - } - - pub fn fromArrayList(list: std.ArrayListUnmanaged(u8), allocator: std.mem.Allocator) !*ByteStore { - return ByteStore.init(list.items, allocator); - } - - pub fn toInternalBlob(this: *ByteStore) InternalBlob { - const ptr = this.ptr orelse return InternalBlob{ - .bytes = std.ArrayList(u8){ - .items = &.{}, - .capacity = 0, - .allocator = this.allocator, - }, - }; - - const result = InternalBlob{ - .bytes = std.ArrayList(u8){ - .items = ptr[0..this.len], - .capacity = this.cap, - .allocator = this.allocator, - }, - }; - - this.allocator = bun.default_allocator; - this.len = 0; - this.cap = 0; - return result; - } - pub fn slice(this: ByteStore) []u8 { - if (this.ptr) |ptr| { - return ptr[0..this.len]; - } - return ""; - } - - pub fn allocatedSlice(this: ByteStore) []u8 { - if (this.ptr) |ptr| { - return ptr[0..this.cap]; - } - return ""; - } - - pub fn deinit(this: *ByteStore) void { - bun.default_allocator.free(this.stored_name.slice()); - if (this.ptr) |ptr| { - this.allocator.free(ptr[0..this.cap]); - } - this.ptr = null; - this.len = 0; - this.cap = 0; - } - - pub fn asArrayList(this: ByteStore) std.ArrayListUnmanaged(u8) { - return this.asArrayListLeak(); - } - - pub fn asArrayListLeak(this: ByteStore) std.ArrayListUnmanaged(u8) { - return .{ - .items = this.ptr[0..this.len], - .capacity = this.cap, - }; - } - }; - - pub fn getStream( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const thisValue = callframe.this(); - if (js.streamGetCached(thisValue)) |cached| { - return cached; - } - var recommended_chunk_size: SizeType = 0; - var arguments_ = callframe.arguments_old(2); 
- var arguments = arguments_.ptr[0..arguments_.len]; - if (arguments.len > 0) { - if (!arguments[0].isNumber() and !arguments[0].isUndefinedOrNull()) { - return globalThis.throwInvalidArguments("chunkSize must be a number", .{}); - } - - recommended_chunk_size = @as(SizeType, @intCast(@max(0, @as(i52, @truncate(arguments[0].toInt64()))))); - } - const stream = JSC.WebCore.ReadableStream.fromBlob( - globalThis, - this, - recommended_chunk_size, - ); - - if (this.store) |store| { - switch (store.data) { - .file => |f| switch (f.pathlike) { - .fd => { - // in the case we have a file descriptor store, we want to de-duplicate - // readable streams. in every other case we want `.stream()` to be its - // own stream. - js.streamSetCached(thisValue, globalThis, stream); - }, - else => {}, - }, - else => {}, - } - } - - return stream; - } - - pub fn toStreamWithOffset( - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const this = callframe.this().as(Blob) orelse @panic("this is not a Blob"); - const args = callframe.arguments_old(1).slice(); - - return JSC.WebCore.ReadableStream.fromFileBlobWithOffset( - globalThis, - this, - @intCast(args[0].toInt64()), - ); - } - - // Zig doesn't let you pass a function with a comptime argument to a runtime-known function. - fn lifetimeWrap(comptime Fn: anytype, comptime lifetime: JSC.WebCore.Lifetime) fn (*Blob, *JSC.JSGlobalObject) JSC.JSValue { - return struct { - fn wrap(this: *Blob, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - return JSC.toJSHostValue(globalObject, Fn(this, globalObject, lifetime)); - } - }.wrap; - } - - pub fn getText( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - _: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - return this.getTextClone(globalThis); - } - - pub fn getTextClone( - this: *Blob, - globalObject: *JSC.JSGlobalObject, - ) JSC.JSValue { - const store = this.store; - if (store) |st| st.ref(); - defer if (store) |st| st.deref(); - return JSC.JSPromise.wrap(globalObject, lifetimeWrap(toString, .clone), .{ this, globalObject }); - } - - pub fn getTextTransfer( - this: *Blob, - globalObject: *JSC.JSGlobalObject, - ) JSC.JSValue { - const store = this.store; - if (store) |st| st.ref(); - defer if (store) |st| st.deref(); - return JSC.JSPromise.wrap(globalObject, lifetimeWrap(toString, .transfer), .{ this, globalObject }); - } - - pub fn getJSON( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - _: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - return this.getJSONShare(globalThis); - } - - pub fn getJSONShare( - this: *Blob, - globalObject: *JSC.JSGlobalObject, - ) JSC.JSValue { - const store = this.store; - if (store) |st| st.ref(); - defer if (store) |st| st.deref(); - return JSC.JSPromise.wrap(globalObject, lifetimeWrap(toJSON, .share), .{ this, globalObject }); - } - pub fn getArrayBufferTransfer( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - const store = this.store; - if (store) |st| st.ref(); - defer if (store) |st| st.deref(); - - return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toArrayBuffer, .transfer), .{ this, globalThis }); - } - - pub fn getArrayBufferClone( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - const store = this.store; - if (store) |st| st.ref(); - defer if (store) |st| st.deref(); - return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toArrayBuffer, .clone), .{ this, globalThis }); - } - - pub fn getArrayBuffer( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - _: *JSC.CallFrame, - ) 
bun.JSError!JSValue { - return this.getArrayBufferClone(globalThis); - } - - pub fn getBytesClone( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - ) JSValue { - const store = this.store; - if (store) |st| st.ref(); - defer if (store) |st| st.deref(); - return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toUint8Array, .clone), .{ this, globalThis }); - } - - pub fn getBytes( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - _: *JSC.CallFrame, - ) bun.JSError!JSValue { - return this.getBytesClone(globalThis); - } - - pub fn getBytesTransfer( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - ) JSValue { - const store = this.store; - if (store) |st| st.ref(); - defer if (store) |st| st.deref(); - return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toUint8Array, .transfer), .{ this, globalThis }); - } - - pub fn getFormData( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - _: *JSC.CallFrame, - ) bun.JSError!JSValue { - const store = this.store; - if (store) |st| st.ref(); - defer if (store) |st| st.deref(); - - return JSC.JSPromise.wrap(globalThis, lifetimeWrap(toFormData, .temporary), .{ this, globalThis }); - } - - fn getExistsSync(this: *Blob) JSC.JSValue { - if (this.size == Blob.max_size) { - this.resolveSize(); - } - - // If there's no store that means it's empty and we just return true - // it will not error to return an empty Blob - const store = this.store orelse return JSValue.jsBoolean(true); - - if (store.data == .bytes) { - // Bytes will never error - return JSValue.jsBoolean(true); - } - - // We say regular files and pipes exist. - // This is mostly meant for "Can we use this in new Response(file)?" - return JSValue.jsBoolean(bun.isRegularFile(store.data.file.mode) or bun.C.S.ISFIFO(store.data.file.mode)); - } - - pub fn isS3(this: *const Blob) bool { - if (this.store) |store| { - return store.data == .s3; - } - return false; - } - - const S3BlobDownloadTask = struct { - blob: Blob, - globalThis: *JSC.JSGlobalObject, - promise: JSC.JSPromise.Strong, - poll_ref: bun.Async.KeepAlive = .{}, - - handler: S3ReadHandler, - pub const new = bun.TrivialNew(S3BlobDownloadTask); - pub const S3ReadHandler = *const fn (this: *Blob, globalthis: *JSGlobalObject, raw_bytes: []u8) JSValue; - - pub fn callHandler(this: *S3BlobDownloadTask, raw_bytes: []u8) JSValue { - return this.handler(&this.blob, this.globalThis, raw_bytes); - } - pub fn onS3DownloadResolved(result: S3.S3DownloadResult, this: *S3BlobDownloadTask) void { - defer this.deinit(); - switch (result) { - .success => |response| { - const bytes = response.body.list.items; - if (this.blob.size == Blob.max_size) { - this.blob.size = @truncate(bytes.len); - } - JSC.AnyPromise.wrap(.{ .normal = this.promise.get() }, this.globalThis, S3BlobDownloadTask.callHandler, .{ this, bytes }); - }, - inline .not_found, .failure => |err| { - this.promise.reject(this.globalThis, err.toJS(this.globalThis, this.blob.store.?.getPath())); - }, - } - } - - pub fn init(globalThis: *JSC.JSGlobalObject, blob: *Blob, handler: S3BlobDownloadTask.S3ReadHandler) JSValue { - blob.store.?.ref(); - - const this = S3BlobDownloadTask.new(.{ - .globalThis = globalThis, - .blob = blob.*, - .promise = JSC.JSPromise.Strong.init(globalThis), - .handler = handler, - }); - const promise = this.promise.value(); - const env = this.globalThis.bunVM().transpiler.env; - const credentials = this.blob.store.?.data.s3.getCredentials(); - const path = this.blob.store.?.data.s3.path(); - - this.poll_ref.ref(globalThis.bunVM()); - if (blob.offset > 0) { - const len: ?usize = if 
(blob.size != Blob.max_size) @intCast(blob.size) else null; - const offset: usize = @intCast(blob.offset); - S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); - } else if (blob.size == Blob.max_size) { - S3.download(credentials, path, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); - } else { - const len: usize = @intCast(blob.size); - const offset: usize = @intCast(blob.offset); - S3.downloadSlice(credentials, path, offset, len, @ptrCast(&S3BlobDownloadTask.onS3DownloadResolved), this, if (env.getHttpProxy(true, null)) |proxy| proxy.href else null); - } - return promise; - } - - pub fn deinit(this: *S3BlobDownloadTask) void { - this.blob.store.?.deref(); - this.poll_ref.unref(this.globalThis.bunVM()); - this.promise.deinit(); - bun.destroy(this); - } - }; - - pub fn doWrite(this: *Blob, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments = callframe.arguments_old(3).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - - const data = args.nextEat() orelse { - return globalThis.throwInvalidArguments("blob.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}); - }; - if (data.isEmptyOrUndefinedOrNull()) { - return globalThis.throwInvalidArguments("blob.write(pathOrFdOrBlob, blob) expects a Blob-y thing to write", .{}); - } - var mkdirp_if_not_exists: ?bool = null; - const options = args.nextEat(); - if (options) |options_object| { - if (options_object.isObject()) { - if (try options_object.getTruthy(globalThis, "createPath")) |create_directory| { - if (!create_directory.isBoolean()) { - return globalThis.throwInvalidArgumentType("write", "options.createPath", "boolean"); - } - mkdirp_if_not_exists = create_directory.toBoolean(); - } - if (try options_object.getTruthy(globalThis, "type")) |content_type| { - //override the content type - if (!content_type.isString()) { - return globalThis.throwInvalidArgumentType("write", "options.type", "string"); - } - var content_type_str = try content_type.toSlice(globalThis, bun.default_allocator); - defer content_type_str.deinit(); - const slice = content_type_str.slice(); - if (strings.isAllASCII(slice)) { - if (this.content_type_allocated) { - bun.default_allocator.free(this.content_type); - } - this.content_type_was_set = true; - - if (globalThis.bunVM().mimeType(slice)) |mime| { - this.content_type = mime.value; - } else { - const content_type_buf = bun.default_allocator.alloc(u8, slice.len) catch bun.outOfMemory(); - this.content_type = strings.copyLowercase(slice, content_type_buf); - this.content_type_allocated = true; - } - } - } - } else if (!options_object.isEmptyOrUndefinedOrNull()) { - return globalThis.throwInvalidArgumentType("write", "options", "object"); - } - } - var blob_internal: PathOrBlob = .{ .blob = this.* }; - return writeFileInternal(globalThis, &blob_internal, data, .{ .mkdirp_if_not_exists = mkdirp_if_not_exists, .extra_options = options }); - } - - pub fn doUnlink(this: *Blob, globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { - const arguments = callframe.arguments_old(1).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); - defer args.deinit(); - const store = this.store orelse { - return JSC.JSPromise.resolvedPromiseValue(globalThis, 
globalThis.createInvalidArgs("Blob is detached", .{})); - }; - return switch (store.data) { - .s3 => |*s3| try s3.unlink(store, globalThis, args.nextEat()), - .file => |file| file.unlink(globalThis), - else => JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Blob is read-only", .{})), - }; - } - - // This mostly means 'can it be read?' - pub fn getExists( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - _: *JSC.CallFrame, - ) bun.JSError!JSValue { - if (this.isS3()) { - return S3File.S3BlobStatTask.exists(globalThis, this); - } - return JSC.JSPromise.resolvedPromiseValue(globalThis, this.getExistsSync()); - } - - pub const FileStreamWrapper = struct { - promise: JSC.JSPromise.Strong, - readable_stream_ref: JSC.WebCore.ReadableStream.Strong, - sink: *JSC.WebCore.FileSink, - - pub const new = bun.TrivialNew(@This()); - - pub fn deinit(this: *@This()) void { - this.promise.deinit(); - this.readable_stream_ref.deinit(); - this.sink.deref(); - bun.destroy(this); - } - }; - - pub fn onFileStreamResolveRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - var args = callframe.arguments_old(2); - var this = args.ptr[args.len - 1].asPromisePtr(FileStreamWrapper); - defer this.deinit(); - var strong = this.readable_stream_ref; - defer strong.deinit(); - this.readable_stream_ref = .{}; - if (strong.get(globalThis)) |stream| { - stream.done(globalThis); - } - this.promise.resolve(globalThis, JSC.JSValue.jsNumber(0)); - return .undefined; - } - - pub fn onFileStreamRejectRequestStream(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const args = callframe.arguments_old(2); - var this = args.ptr[args.len - 1].asPromisePtr(FileStreamWrapper); - defer this.sink.deref(); - const err = args.ptr[0]; - - var strong = this.readable_stream_ref; - defer strong.deinit(); - this.readable_stream_ref = .{}; - - this.promise.reject(globalThis, err); - - if (strong.get(globalThis)) |stream| { - stream.cancel(globalThis); - } - return .undefined; - } - comptime { - const jsonResolveRequestStream = JSC.toJSHostFunction(onFileStreamResolveRequestStream); - @export(&jsonResolveRequestStream, .{ .name = "Bun__FileStreamWrapper__onResolveRequestStream" }); - const jsonRejectRequestStream = JSC.toJSHostFunction(onFileStreamRejectRequestStream); - @export(&jsonRejectRequestStream, .{ .name = "Bun__FileStreamWrapper__onRejectRequestStream" }); - } - - pub fn pipeReadableStreamToBlob(this: *Blob, globalThis: *JSC.JSGlobalObject, readable_stream: JSC.WebCore.ReadableStream, extra_options: ?JSValue) JSC.JSValue { - var store = this.store orelse { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, globalThis.createErrorInstance("Blob is detached", .{})); - }; - - if (this.isS3()) { - const s3 = &this.store.?.data.s3; - var aws_options = s3.getCredentialsWithOptions(extra_options, globalThis) catch |err| { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, globalThis.takeException(err)); - }; - defer aws_options.deinit(); - - const path = s3.path(); - const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy_url = if (proxy) |p| p.href else null; - - return S3.uploadStream( - (if (extra_options != null) aws_options.credentials.dupe() else s3.getCredentials()), - path, - readable_stream, - globalThis, - aws_options.options, - aws_options.acl, - aws_options.storage_class, - this.contentTypeOrMimeType(), - 
proxy_url, - null, - undefined, - ); - } - - if (store.data != .file) { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, globalThis.createErrorInstance("Blob is read-only", .{})); - } - - const file_sink = brk_sink: { - if (Environment.isWindows) { - const pathlike = store.data.file.pathlike; - const fd: bun.FileDescriptor = if (pathlike == .fd) pathlike.fd else brk: { - var file_path: bun.PathBuffer = undefined; - const path = pathlike.path.sliceZ(&file_path); - switch (bun.sys.open( - path, - bun.O.WRONLY | bun.O.CREAT | bun.O.NONBLOCK, - write_permissions, - )) { - .result => |result| { - break :brk result; - }, - .err => |err| { - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.withPath(path).toJSC(globalThis)); - }, - } - unreachable; - }; - - const is_stdout_or_stderr = brk: { - if (pathlike != .fd) { - break :brk false; - } - - if (globalThis.bunVM().rare_data) |rare| { - if (store == rare.stdout_store) { - break :brk true; - } - - if (store == rare.stderr_store) { - break :brk true; - } - } - - break :brk if (fd.stdioTag()) |tag| switch (tag) { - .std_out, .std_err => true, - else => false, - } else false; - }; - var sink = JSC.WebCore.FileSink.init(fd, this.globalThis.bunVM().eventLoop()); - sink.writer.owns_fd = pathlike != .fd; - - if (is_stdout_or_stderr) { - switch (sink.writer.startSync(fd, false)) { - .err => |err| { - sink.deref(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); - }, - else => {}, - } - } else { - switch (sink.writer.start(fd, true)) { - .err => |err| { - sink.deref(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); - }, - else => {}, - } - } - - break :brk_sink sink; - } - - var sink = JSC.WebCore.FileSink.init(bun.invalid_fd, this.globalThis.bunVM().eventLoop()); - - const input_path: JSC.WebCore.PathOrFileDescriptor = brk: { - if (store.data.file.pathlike == .fd) { - break :brk .{ .fd = store.data.file.pathlike.fd }; - } else { - break :brk .{ - .path = ZigString.Slice.fromUTF8NeverFree( - store.data.file.pathlike.path.slice(), - ).clone( - bun.default_allocator, - ) catch bun.outOfMemory(), - }; - } - }; - defer input_path.deinit(); - - const stream_start: JSC.WebCore.StreamStart = .{ - .FileSink = .{ - .input_path = input_path, - }, - }; - - switch (sink.start(stream_start)) { - .err => |err| { - sink.deref(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err.toJSC(globalThis)); - }, - else => {}, - } - break :brk_sink sink; - }; - var signal = &file_sink.signal; - - signal.* = JSC.WebCore.FileSink.JSSink.SinkSignal.init(.zero); - - // explicitly set it to a dead pointer - // we use this memory address to disable signals being sent - signal.clear(); - bun.assert(signal.isDead()); - - const assignment_result: JSC.JSValue = JSC.WebCore.FileSink.JSSink.assignToStream( - globalThis, - readable_stream.value, - file_sink, - @as(**anyopaque, @ptrCast(&signal.ptr)), - ); - - assignment_result.ensureStillAlive(); - - // assert that it was updated - bun.assert(!signal.isDead()); - - if (assignment_result.toError()) |err| { - file_sink.deref(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); - } - - if (!assignment_result.isEmptyOrUndefinedOrNull()) { - globalThis.bunVM().drainMicrotasks(); - - assignment_result.ensureStillAlive(); - // it returns a 
Promise when it goes through ReadableStreamDefaultReader - if (assignment_result.asAnyPromise()) |promise| { - switch (promise.status(globalThis.vm())) { - .pending => { - const wrapper = FileStreamWrapper.new(.{ - .promise = JSC.JSPromise.Strong.init(globalThis), - .readable_stream_ref = JSC.WebCore.ReadableStream.Strong.init(readable_stream, globalThis), - .sink = file_sink, - }); - const promise_value = wrapper.promise.value(); - - assignment_result.then( - globalThis, - wrapper, - onFileStreamResolveRequestStream, - onFileStreamRejectRequestStream, - ); - return promise_value; - }, - .fulfilled => { - file_sink.deref(); - readable_stream.done(globalThis); - return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(0)); - }, - .rejected => { - file_sink.deref(); - - readable_stream.cancel(globalThis); - - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, promise.result(globalThis.vm())); - }, - } - } else { - file_sink.deref(); - - readable_stream.cancel(globalThis); - - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, assignment_result); - } - } - file_sink.deref(); - - return JSC.JSPromise.resolvedPromiseValue(globalThis, JSC.JSValue.jsNumber(0)); - } - - pub fn getWriter( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - var arguments_ = callframe.arguments_old(1); - var arguments = arguments_.ptr[0..arguments_.len]; - - if (!arguments.ptr[0].isEmptyOrUndefinedOrNull() and !arguments.ptr[0].isObject()) { - return globalThis.throwInvalidArguments("options must be an object or undefined", .{}); - } - - var store = this.store orelse { - return globalThis.throwInvalidArguments("Blob is detached", .{}); - }; - if (this.isS3()) { - const s3 = &this.store.?.data.s3; - const path = s3.path(); - const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy_url = if (proxy) |p| p.href else null; - if (arguments.len > 0) { - const options = arguments.ptr[0]; - if (options.isObject()) { - if (try options.getTruthy(globalThis, "type")) |content_type| { - //override the content type - if (!content_type.isString()) { - return globalThis.throwInvalidArgumentType("write", "options.type", "string"); - } - var content_type_str = try content_type.toSlice(globalThis, bun.default_allocator); - defer content_type_str.deinit(); - const slice = content_type_str.slice(); - if (strings.isAllASCII(slice)) { - if (this.content_type_allocated) { - bun.default_allocator.free(this.content_type); - } - this.content_type_was_set = true; - - if (globalThis.bunVM().mimeType(slice)) |mime| { - this.content_type = mime.value; - } else { - const content_type_buf = bun.default_allocator.alloc(u8, slice.len) catch bun.outOfMemory(); - this.content_type = strings.copyLowercase(slice, content_type_buf); - this.content_type_allocated = true; - } - } - } - const credentialsWithOptions = try s3.getCredentialsWithOptions(options, globalThis); - return try S3.writableStream( - credentialsWithOptions.credentials.dupe(), - path, - globalThis, - credentialsWithOptions.options, - this.contentTypeOrMimeType(), - proxy_url, - credentialsWithOptions.storage_class, - ); - } - } - return try S3.writableStream( - s3.getCredentials(), - path, - globalThis, - .{}, - this.contentTypeOrMimeType(), - proxy_url, - null, - ); - } - if (store.data != .file) { - return globalThis.throwInvalidArguments("Blob is read-only", .{}); - } - - if (Environment.isWindows) { - 
const pathlike = store.data.file.pathlike; - const vm = globalThis.bunVM(); - const fd: bun.FileDescriptor = if (pathlike == .fd) pathlike.fd else brk: { - var file_path: bun.PathBuffer = undefined; - switch (bun.sys.open( - pathlike.path.sliceZ(&file_path), - bun.O.WRONLY | bun.O.CREAT | bun.O.NONBLOCK, - write_permissions, - )) { - .result => |result| { - break :brk result; - }, - .err => |err| { - return globalThis.throwValue(err.withPath(pathlike.path.slice()).toJSC(globalThis)); - }, - } - @compileError(unreachable); - }; - - const is_stdout_or_stderr = brk: { - if (pathlike != .fd) { - break :brk false; - } - - if (vm.rare_data) |rare| { - if (store == rare.stdout_store) { - break :brk true; - } - - if (store == rare.stderr_store) { - break :brk true; - } - } - - break :brk if (fd.stdioTag()) |tag| switch (tag) { - .std_out, .std_err => true, - else => false, - } else false; - }; - var sink = JSC.WebCore.FileSink.init(fd, this.globalThis.bunVM().eventLoop()); - sink.writer.owns_fd = pathlike != .fd; - - if (is_stdout_or_stderr) { - switch (sink.writer.startSync(fd, false)) { - .err => |err| { - sink.deref(); - return globalThis.throwValue(err.toJSC(globalThis)); - }, - else => {}, - } - } else { - switch (sink.writer.start(fd, true)) { - .err => |err| { - sink.deref(); - return globalThis.throwValue(err.toJSC(globalThis)); - }, - else => {}, - } - } - - return sink.toJS(globalThis); - } - - var sink = JSC.WebCore.FileSink.init(bun.invalid_fd, this.globalThis.bunVM().eventLoop()); - - const input_path: JSC.WebCore.PathOrFileDescriptor = brk: { - if (store.data.file.pathlike == .fd) { - break :brk .{ .fd = store.data.file.pathlike.fd }; - } else { - break :brk .{ - .path = ZigString.Slice.fromUTF8NeverFree( - store.data.file.pathlike.path.slice(), - ).clone( - globalThis.allocator(), - ) catch bun.outOfMemory(), - }; - } - }; - defer input_path.deinit(); - - var stream_start: JSC.WebCore.StreamStart = .{ - .FileSink = .{ - .input_path = input_path, - }, - }; - - if (arguments.len > 0 and arguments.ptr[0].isObject()) { - stream_start = try JSC.WebCore.StreamStart.fromJSWithTag(globalThis, arguments[0], .FileSink); - stream_start.FileSink.input_path = input_path; - } - - switch (sink.start(stream_start)) { - .err => |err| { - sink.deref(); - return globalThis.throwValue(err.toJSC(globalThis)); - }, - else => {}, - } - - return sink.toJS(globalThis); - } - - pub fn getSliceFrom(this: *Blob, globalThis: *JSC.JSGlobalObject, relativeStart: i64, relativeEnd: i64, content_type: []const u8, content_type_was_allocated: bool) JSValue { - const offset = this.offset +| @as(SizeType, @intCast(relativeStart)); - const len = @as(SizeType, @intCast(@max(relativeEnd -| relativeStart, 0))); - - // This copies over the is_all_ascii flag - // which is okay because this will only be a <= slice - var blob = this.dupe(); - blob.offset = offset; - blob.size = len; - - // infer the content type if it was not specified - if (content_type.len == 0 and this.content_type.len > 0 and !this.content_type_allocated) { - blob.content_type = this.content_type; - } else { - blob.content_type = content_type; - } - blob.content_type_allocated = content_type_was_allocated; - blob.content_type_was_set = this.content_type_was_set or content_type_was_allocated; - - var blob_ = Blob.new(blob); - blob_.allocator = bun.default_allocator; - return blob_.toJS(globalThis); - } - - /// https://w3c.github.io/FileAPI/#slice-method-algo - /// The slice() method returns a new Blob object with bytes ranging from the - /// optional start 
parameter up to but not including the optional end - /// parameter, and with a type attribute that is the value of the optional - /// contentType parameter. It must act as follows: - pub fn getSlice( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const allocator = bun.default_allocator; - var arguments_ = callframe.arguments_old(3); - var args = arguments_.ptr[0..arguments_.len]; - - if (this.size == 0) { - const empty = Blob.initEmpty(globalThis); - var ptr = Blob.new(empty); - ptr.allocator = allocator; - return ptr.toJS(globalThis); - } - - // If the optional start parameter is not used as a parameter when making this call, let relativeStart be 0. - var relativeStart: i64 = 0; - - // If the optional end parameter is not used as a parameter when making this call, let relativeEnd be size. - var relativeEnd: i64 = @as(i64, @intCast(this.size)); - - if (args.ptr[0].isString()) { - args.ptr[2] = args.ptr[0]; - args.ptr[1] = .zero; - args.ptr[0] = .zero; - args.len = 3; - } else if (args.ptr[1].isString()) { - args.ptr[2] = args.ptr[1]; - args.ptr[1] = .zero; - args.len = 3; - } - - var args_iter = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), args); - if (args_iter.nextEat()) |start_| { - if (start_.isNumber()) { - const start = start_.toInt64(); - if (start < 0) { - // If the optional start parameter is negative, let relativeStart be start + size. - relativeStart = @as(i64, @intCast(@max(start +% @as(i64, @intCast(this.size)), 0))); - } else { - // Otherwise, let relativeStart be start. - relativeStart = @min(@as(i64, @intCast(start)), @as(i64, @intCast(this.size))); - } - } - } - - if (args_iter.nextEat()) |end_| { - if (end_.isNumber()) { - const end = end_.toInt64(); - // If end is negative, let relativeEnd be max((size + end), 0). - if (end < 0) { - // If the optional start parameter is negative, let relativeStart be start + size. - relativeEnd = @as(i64, @intCast(@max(end +% @as(i64, @intCast(this.size)), 0))); - } else { - // Otherwise, let relativeStart be start. 
- relativeEnd = @min(@as(i64, @intCast(end)), @as(i64, @intCast(this.size))); - } - } - } - - var content_type: string = ""; - var content_type_was_allocated = false; - if (args_iter.nextEat()) |content_type_| { - inner: { - if (content_type_.isString()) { - var zig_str = try content_type_.getZigString(globalThis); - var slicer = zig_str.toSlice(bun.default_allocator); - defer slicer.deinit(); - const slice = slicer.slice(); - if (!strings.isAllASCII(slice)) { - break :inner; - } - - if (globalThis.bunVM().mimeType(slice)) |mime| { - content_type = mime.value; - break :inner; - } - - content_type_was_allocated = slice.len > 0; - const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); - content_type = strings.copyLowercase(slice, content_type_buf); - } - } - } - - return this.getSliceFrom(globalThis, relativeStart, relativeEnd, content_type, content_type_was_allocated); - } - - pub fn getMimeType(this: *const Blob) ?bun.http.MimeType { - if (this.store) |store| { - return store.mime_type; - } - - return null; - } - - pub fn getMimeTypeOrContentType(this: *const Blob) ?bun.http.MimeType { - if (this.content_type_was_set) { - return bun.http.MimeType.init(this.content_type, null, null); - } - - if (this.store) |store| { - return store.mime_type; - } - - return null; - } - - pub fn getType( - this: *Blob, - globalThis: *JSC.JSGlobalObject, - ) JSValue { - if (this.content_type.len > 0) { - if (this.content_type_allocated) { - return ZigString.init(this.content_type).toJS(globalThis); - } - return ZigString.init(this.content_type).toJS(globalThis); - } - - if (this.store) |store| { - return ZigString.init(store.mime_type.value).toJS(globalThis); - } - - return ZigString.Empty.toJS(globalThis); - } - - pub fn getNameString(this: *Blob) ?bun.String { - if (this.name.tag != .Dead) return this.name; - - if (this.getFileName()) |path| { - this.name = bun.String.createUTF8(path); - return this.name; - } - - return null; - } - - // TODO: Move this to a separate `File` object or BunFile - pub fn getName( - this: *Blob, - _: JSC.JSValue, - globalThis: *JSC.JSGlobalObject, - ) JSValue { - return if (this.getNameString()) |name| name.toJS(globalThis) else .undefined; - } - - pub fn setName( - this: *Blob, - jsThis: JSC.JSValue, - globalThis: *JSC.JSGlobalObject, - value: JSValue, - - // TODO: support JSError for getters/setters - ) bool { - // by default we don't have a name so lets allow it to be set undefined - if (value.isEmptyOrUndefinedOrNull()) { - this.name.deref(); - this.name = bun.String.dead; - js.nameSetCached(jsThis, globalThis, value); - return true; - } - if (value.isString()) { - const old_name = this.name; - - this.name = bun.String.fromJS(value, globalThis) catch |err| { - switch (err) { - error.JSError => {}, - error.OutOfMemory => { - globalThis.throwOutOfMemory() catch {}; - }, - } - this.name = bun.String.empty; - return false; - }; - // We don't need to increment the reference count since tryFromJS already did it. - js.nameSetCached(jsThis, globalThis, value); - old_name.deref(); - return true; - } - return false; - } - - pub fn getFileName( - this: *const Blob, - ) ?[]const u8 { - if (this.store) |store| { - if (store.data == .file) { - if (store.data.file.pathlike == .path) { - return store.data.file.pathlike.path.slice(); - } - - // we shouldn't return Number here. 
- } else if (store.data == .bytes) { - if (store.data.bytes.stored_name.slice().len > 0) - return store.data.bytes.stored_name.slice(); - } else if (store.data == .s3) { - return store.data.s3.path(); - } - } - - return null; - } - - pub fn getLoader(blob: *const Blob, jsc_vm: *VirtualMachine) ?bun.options.Loader { - if (blob.getFileName()) |filename| { - const current_path = bun.fs.Path.init(filename); - return current_path.loader(&jsc_vm.transpiler.options.loaders) orelse .tsx; - } else if (blob.getMimeTypeOrContentType()) |mime_type| { - return .fromMimeType(mime_type); - } else { - // Be maximally permissive. - return .tsx; - } - } - - // TODO: Move this to a separate `File` object or BunFile - pub fn getLastModified( - this: *Blob, - _: *JSC.JSGlobalObject, - ) JSValue { - if (this.store) |store| { - if (store.data == .file) { - // last_modified can be already set during read. - if (store.data.file.last_modified == JSC.init_timestamp and !this.isS3()) { - resolveFileStat(store); - } - return JSValue.jsNumber(store.data.file.last_modified); - } - } - - if (this.is_jsdom_file) { - return JSValue.jsNumber(this.last_modified); - } - - return JSValue.jsNumber(JSC.init_timestamp); - } - - pub fn getSizeForBindings(this: *Blob) u64 { - if (this.size == Blob.max_size) { - this.resolveSize(); - } - - // If the file doesn't exist or is not seekable - // signal that the size is unknown. - if (this.store != null and this.store.?.data == .file and - !(this.store.?.data.file.seekable orelse false)) - { - return std.math.maxInt(u64); - } - - if (this.size == Blob.max_size) - return std.math.maxInt(u64); - - return this.size; - } - - export fn Bun__Blob__getSizeForBindings(this: *Blob) callconv(.C) u64 { - return this.getSizeForBindings(); - } - - comptime { - _ = Bun__Blob__getSizeForBindings; - } - pub fn getStat(this: *Blob, globalThis: *JSC.JSGlobalObject, callback: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const store = this.store orelse return JSC.JSValue.jsUndefined(); - // TODO: make this async for files - return switch (store.data) { - .file => |*file| { - return switch (file.pathlike) { - .path => |path_like| { - return JSC.Node.Async.stat.create(globalThis, undefined, .{ - .path = .{ - .encoded_slice = switch (path_like) { - // it's already converted to utf8 - .encoded_slice => |slice| try slice.toOwned(bun.default_allocator), - else => try ZigString.init(path_like.slice()).toSliceClone(bun.default_allocator), - }, - }, - }, globalThis.bunVM()); - }, - .fd => |fd| JSC.Node.Async.fstat.create(globalThis, undefined, .{ .fd = fd }, globalThis.bunVM()), - }; - }, - .s3 => S3File.getStat(this, globalThis, callback), - else => JSC.JSValue.jsUndefined(), - }; - } - pub fn getSize(this: *Blob, _: *JSC.JSGlobalObject) JSValue { - if (this.size == Blob.max_size) { - if (this.isS3()) { - return JSC.JSValue.jsNumber(std.math.nan(f64)); - } - this.resolveSize(); - if (this.size == Blob.max_size and this.store != null) { - return JSC.jsNumber(std.math.inf(f64)); - } else if (this.size == 0 and this.store != null) { - if (this.store.?.data == .file and - (this.store.?.data.file.seekable orelse true) == false and - this.store.?.data.file.max_size == Blob.max_size) - { - return JSC.jsNumber(std.math.inf(f64)); - } - } - } - - return JSValue.jsNumber(this.size); - } - - pub fn resolveSize(this: *Blob) void { - if (this.store) |store| { - if (store.data == .bytes) { - const offset = this.offset; - const store_size = store.size(); - if (store_size != Blob.max_size) { - this.offset = @min(store_size, 
offset); - this.size = store_size - offset; - } - - return; - } else if (store.data == .file) { - if (store.data.file.seekable == null) { - resolveFileStat(store); - } - - if (store.data.file.seekable != null and store.data.file.max_size != Blob.max_size) { - const store_size = store.data.file.max_size; - const offset = this.offset; - - this.offset = @min(store_size, offset); - this.size = store_size -| offset; - return; - } - } - - this.size = 0; - } else { - this.size = 0; - } - } - - /// resolve file stat like size, last_modified - fn resolveFileStat(store: *Store) void { - if (store.data.file.pathlike == .path) { - var buffer: bun.PathBuffer = undefined; - switch (bun.sys.stat(store.data.file.pathlike.path.sliceZ(&buffer))) { - .result => |stat| { - store.data.file.max_size = if (bun.isRegularFile(stat.mode) or stat.size > 0) - @truncate(@as(u64, @intCast(@max(stat.size, 0)))) - else - Blob.max_size; - store.data.file.mode = @intCast(stat.mode); - store.data.file.seekable = bun.isRegularFile(stat.mode); - store.data.file.last_modified = JSC.toJSTime(stat.mtime().sec, stat.mtime().nsec); - }, - // the file may not exist yet. Thats's okay. - else => {}, - } - } else if (store.data.file.pathlike == .fd) { - switch (bun.sys.fstat(store.data.file.pathlike.fd)) { - .result => |stat| { - store.data.file.max_size = if (bun.isRegularFile(stat.mode) or stat.size > 0) - @as(SizeType, @truncate(@as(u64, @intCast(@max(stat.size, 0))))) - else - Blob.max_size; - store.data.file.mode = @intCast(stat.mode); - store.data.file.seekable = bun.isRegularFile(stat.mode); - store.data.file.last_modified = JSC.toJSTime(stat.mtime().sec, stat.mtime().nsec); - }, - // the file may not exist yet. Thats's okay. - else => {}, - } - } - } - - pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*Blob { - const allocator = bun.default_allocator; - var blob: Blob = undefined; - var arguments = callframe.arguments_old(2); - const args = arguments.slice(); - - switch (args.len) { - 0 => { - const empty: []u8 = &[_]u8{}; - blob = Blob.init(empty, allocator, globalThis); - }, - else => { - blob = get(globalThis, args[0], false, true) catch |err| switch (err) { - error.OutOfMemory, error.JSError => |e| return e, - error.InvalidArguments => return globalThis.throwInvalidArguments("new Blob() expects an Array", .{}), - }; - - if (args.len > 1) { - const options = args[1]; - if (options.isObject()) { - // type, the ASCII-encoded string in lower case - // representing the media type of the Blob. - // Normative conditions for this member are provided - // in the § 3.1 Constructors. 
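Both Blob.prototype.slice() and the Blob constructor normalize a user-supplied content type the same way: non-ASCII input is ignored, a MIME type already known to the VM is reused as-is, and anything else is lowercased into a fresh allocation. A minimal sketch of that normalization using only the Zig standard library (the function name and the null-for-rejected convention are illustrative, not part of this diff):

    const std = @import("std");

    /// Returns a lowercased copy of `raw`, or null when it contains non-ASCII
    /// bytes (mirroring how the code above leaves the content type unset).
    fn normalizeContentType(allocator: std.mem.Allocator, raw: []const u8) !?[]u8 {
        for (raw) |byte| {
            if (byte > 0x7f) return null;
        }
        const out = try allocator.alloc(u8, raw.len);
        for (raw, 0..) |byte, i| out[i] = std.ascii.toLower(byte);
        return out;
    }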
- if (try options.get(globalThis, "type")) |content_type| { - inner: { - if (content_type.isString()) { - var content_type_str = try content_type.toSlice(globalThis, bun.default_allocator); - defer content_type_str.deinit(); - const slice = content_type_str.slice(); - if (!strings.isAllASCII(slice)) { - break :inner; - } - blob.content_type_was_set = true; - - if (globalThis.bunVM().mimeType(slice)) |mime| { - blob.content_type = mime.value; - break :inner; - } - const content_type_buf = allocator.alloc(u8, slice.len) catch bun.outOfMemory(); - blob.content_type = strings.copyLowercase(slice, content_type_buf); - blob.content_type_allocated = true; - } - } - } - } - } - - if (blob.content_type.len == 0) { - blob.content_type = ""; - blob.content_type_was_set = false; - } - }, - } - - blob.calculateEstimatedByteSize(); - - var blob_ = Blob.new(blob); - blob_.allocator = allocator; - return blob_; - } - - pub fn finalize(this: *Blob) void { - this.deinit(); - } - - pub fn initWithAllASCII(bytes: []u8, allocator: std.mem.Allocator, globalThis: *JSGlobalObject, is_all_ascii: bool) Blob { - // avoid allocating a Blob.Store if the buffer is actually empty - var store: ?*Blob.Store = null; - if (bytes.len > 0) { - store = Blob.Store.init(bytes, allocator); - store.?.is_all_ascii = is_all_ascii; - } - return Blob{ - .size = @as(SizeType, @truncate(bytes.len)), - .store = store, - .allocator = null, - .content_type = "", - .globalThis = globalThis, - .is_all_ascii = is_all_ascii, - }; - } - - /// Takes ownership of `bytes`, which must have been allocated with `allocator`. - pub fn init(bytes: []u8, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) Blob { - return Blob{ - .size = @as(SizeType, @truncate(bytes.len)), - .store = if (bytes.len > 0) - Blob.Store.init(bytes, allocator) - else - null, - .allocator = null, - .content_type = "", - .globalThis = globalThis, - }; - } - - pub fn createWithBytesAndAllocator( - bytes: []u8, - allocator: std.mem.Allocator, - globalThis: *JSGlobalObject, - was_string: bool, - ) Blob { - return Blob{ - .size = @as(SizeType, @truncate(bytes.len)), - .store = if (bytes.len > 0) - Blob.Store.init(bytes, allocator) - else - null, - .allocator = null, - .content_type = if (was_string) MimeType.text.value else "", - .globalThis = globalThis, - }; - } - - pub fn tryCreate( - bytes_: []const u8, - allocator_: std.mem.Allocator, - globalThis: *JSGlobalObject, - was_string: bool, - ) !Blob { - if (comptime Environment.isLinux) { - if (bun.linux.memfd_allocator.shouldUse(bytes_)) { - switch (bun.linux.memfd_allocator.create(bytes_)) { - .err => {}, - .result => |result| { - const store = Store.new( - .{ - .data = .{ - .bytes = result, - }, - .allocator = bun.default_allocator, - .ref_count = std.atomic.Value(u32).init(1), - }, - ); - var blob = initWithStore(store, globalThis); - if (was_string and blob.content_type.len == 0) { - blob.content_type = MimeType.text.value; - } - - return blob; - }, - } - } - } - - return createWithBytesAndAllocator(try allocator_.dupe(u8, bytes_), allocator_, globalThis, was_string); - } - - pub fn create( - bytes_: []const u8, - allocator_: std.mem.Allocator, - globalThis: *JSGlobalObject, - was_string: bool, - ) Blob { - return tryCreate(bytes_, allocator_, globalThis, was_string) catch bun.outOfMemory(); - } - - pub fn initWithStore(store: *Blob.Store, globalThis: *JSGlobalObject) Blob { - return Blob{ - .size = store.size(), - .store = store, - .allocator = null, - .content_type = if (store.data == .file) - 
store.data.file.mime_type.value - else - "", - .globalThis = globalThis, - }; - } - - pub fn initEmpty(globalThis: *JSGlobalObject) Blob { - return Blob{ - .size = 0, - .store = null, - .allocator = null, - .content_type = "", - .globalThis = globalThis, - }; - } - - // Transferring doesn't change the reference count - // It is a move - inline fn transfer(this: *Blob) void { - this.store = null; - } - - pub fn detach(this: *Blob) void { - if (this.store != null) this.store.?.deref(); - this.store = null; - } - - /// This does not duplicate - /// This creates a new view - /// and increment the reference count - pub fn dupe(this: *const Blob) Blob { - return this.dupeWithContentType(false); - } - - pub fn dupeWithContentType(this: *const Blob, include_content_type: bool) Blob { - if (this.store != null) this.store.?.ref(); - var duped = this.*; - if (duped.content_type_allocated and duped.allocator != null and !include_content_type) { - - // for now, we just want to avoid a use-after-free here - if (JSC.VirtualMachine.get().mimeType(duped.content_type)) |mime| { - duped.content_type = mime.value; - } else { - // TODO: fix this - // this is a bug. - // it means whenever - duped.content_type = ""; - } - - duped.content_type_allocated = false; - duped.content_type_was_set = false; - if (this.content_type_was_set) { - duped.content_type_was_set = duped.content_type.len > 0; - } - } else if (duped.content_type_allocated and duped.allocator != null and include_content_type) { - duped.content_type = bun.default_allocator.dupe(u8, this.content_type) catch bun.outOfMemory(); - } - duped.name = duped.name.dupeRef(); - - duped.allocator = null; - return duped; - } - - pub fn toJS(this: *Blob, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - // if (comptime Environment.allow_assert) { - // assert(this.allocator != null); - // } - this.calculateEstimatedByteSize(); - - if (this.isS3()) { - return S3File.toJSUnchecked(globalObject, this); - } - - return js.toJSUnchecked(globalObject, this); - } - - pub fn deinit(this: *Blob) void { - this.detach(); - this.name.deref(); - this.name = .dead; - - // TODO: remove this field, make it a boolean. 
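dupe() above is cheap because a Blob is only a view: duplicating it bumps the atomic reference count on the shared Store, and detach()/deinit() drop that reference again. A stripped-down sketch of the same pattern (names are illustrative; the real Store is the one added in src/bun.js/webcore/blob/Store.zig later in this diff):

    const std = @import("std");

    const SharedStore = struct {
        ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1),
        bytes: []u8,
        allocator: std.mem.Allocator,

        fn ref(self: *SharedStore) void {
            _ = self.ref_count.fetchAdd(1, .monotonic);
        }

        fn deref(self: *SharedStore) void {
            if (self.ref_count.fetchSub(1, .monotonic) == 1) {
                self.allocator.free(self.bytes);
                self.allocator.destroy(self);
            }
        }
    };

    const View = struct {
        store: ?*SharedStore = null,

        /// "dupe": a new view over the same bytes, one more reference.
        fn dupe(self: View) View {
            if (self.store) |s| s.ref();
            return self;
        }

        /// "detach": give up this view's reference without copying anything.
        fn detach(self: *View) void {
            if (self.store) |s| s.deref();
            self.store = null;
        }
    };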
- if (this.allocator) |alloc| { - this.allocator = null; - bun.debugAssert(alloc.vtable == bun.default_allocator.vtable); - bun.destroy(this); - } - } - - pub fn sharedView(this: *const Blob) []const u8 { - if (this.size == 0 or this.store == null) return ""; - var slice_ = this.store.?.sharedView(); - if (slice_.len == 0) return ""; - slice_ = slice_[this.offset..]; - - return slice_[0..@min(slice_.len, @as(usize, this.size))]; - } - - pub const Lifetime = JSC.WebCore.Lifetime; - pub fn setIsASCIIFlag(this: *Blob, is_all_ascii: bool) void { - this.is_all_ascii = is_all_ascii; - // if this Blob represents the entire binary data - // which will be pretty common - // we can update the store's is_all_ascii flag - // and any other Blob that points to the same store - // can skip checking the encoding - if (this.size > 0 and this.offset == 0 and this.store.?.data == .bytes) { - this.store.?.is_all_ascii = is_all_ascii; - } - } - - pub fn needsToReadFile(this: *const Blob) bool { - return this.store != null and (this.store.?.data == .file); - } - - pub fn toStringWithBytes(this: *Blob, global: *JSGlobalObject, raw_bytes: []const u8, comptime lifetime: Lifetime) bun.JSError!JSValue { - const bom, const buf = strings.BOM.detectAndSplit(raw_bytes); - - if (buf.len == 0) { - // If all it contained was the bom, we need to free the bytes - if (lifetime == .temporary) bun.default_allocator.free(raw_bytes); - return ZigString.Empty.toJS(global); - } - - if (bom == .utf16_le) { - defer if (lifetime == .temporary) bun.default_allocator.free(raw_bytes); - var out = bun.String.createUTF16(bun.reinterpretSlice(u16, buf)); - defer out.deref(); - return out.toJS(global); - } - - // null == unknown - // false == can't be - const could_be_all_ascii = this.is_all_ascii orelse this.store.?.is_all_ascii; - - if (could_be_all_ascii == null or !could_be_all_ascii.?) { - // if toUTF16Alloc returns null, it means there are no non-ASCII characters - // instead of erroring, invalid characters will become a U+FFFD replacement character - if (strings.toUTF16Alloc(bun.default_allocator, buf, false, false) catch return global.throwOutOfMemory()) |external| { - if (lifetime != .temporary) - this.setIsASCIIFlag(false); - - if (lifetime == .transfer) { - this.detach(); - } - - if (lifetime == .temporary) { - bun.default_allocator.free(raw_bytes); - } - - return ZigString.toExternalU16(external.ptr, external.len, global); - } - - if (lifetime != .temporary) this.setIsASCIIFlag(true); - } - - switch (comptime lifetime) { - // strings are immutable - // we don't need to clone - .clone => { - this.store.?.ref(); - // we don't need to worry about UTF-8 BOM in this case because the store owns the memory. - return ZigString.init(buf).external(global, this.store.?, Store.external); - }, - .transfer => { - const store = this.store.?; - assert(store.data == .bytes); - this.transfer(); - // we don't need to worry about UTF-8 BOM in this case because the store owns the memory. - return ZigString.init(buf).external(global, store, Store.external); - }, - // strings are immutable - // sharing isn't really a thing - .share => { - this.store.?.ref(); - // we don't need to worry about UTF-8 BOM in this case because the store owns the memory.s - return ZigString.init(buf).external(global, this.store.?, Store.external); - }, - .temporary => { - // if there was a UTF-8 BOM, we need to clone the buffer because - // external doesn't support this case here yet. 
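toStringWithBytes (and toJSONWithBytes below) first split off a byte-order mark: a UTF-16LE BOM routes the payload through createUTF16, while a UTF-8 BOM is simply stripped before the ASCII/UTF-16 conversion checks. A hedged sketch of that detection step, assuming only the two BOMs handled here matter (helper and enum names are illustrative):

    const std = @import("std");

    const Bom = enum { none, utf8, utf16_le };

    /// Returns the detected BOM and the payload with the BOM removed.
    fn detectAndSplit(bytes: []const u8) struct { Bom, []const u8 } {
        if (std.mem.startsWith(u8, bytes, "\xEF\xBB\xBF")) return .{ .utf8, bytes[3..] };
        if (std.mem.startsWith(u8, bytes, "\xFF\xFE")) return .{ .utf16_le, bytes[2..] };
        return .{ .none, bytes };
    }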
- if (buf.len != raw_bytes.len) { - var out = bun.String.createLatin1(buf); - defer { - bun.default_allocator.free(raw_bytes); - out.deref(); - } - - return out.toJS(global); - } - - return ZigString.init(buf).toExternalValue(global); - }, - } - } - - pub fn toStringTransfer(this: *Blob, global: *JSGlobalObject) bun.JSError!JSValue { - return this.toString(global, .transfer); - } - - pub fn toString(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) bun.JSError!JSValue { - if (this.needsToReadFile()) { - return this.doReadFile(toStringWithBytes, global); - } - if (this.isS3()) { - return this.doReadFromS3(toStringWithBytes, global); - } - - const view_: []u8 = - @constCast(this.sharedView()); - - if (view_.len == 0) - return ZigString.Empty.toJS(global); - - return toStringWithBytes(this, global, view_, lifetime); - } - - pub fn toJSON(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) bun.JSError!JSValue { - if (this.needsToReadFile()) { - return this.doReadFile(toJSONWithBytes, global); - } - if (this.isS3()) { - return this.doReadFromS3(toJSONWithBytes, global); - } - - const view_ = this.sharedView(); - - return toJSONWithBytes(this, global, view_, lifetime); - } - - pub fn toJSONWithBytes(this: *Blob, global: *JSGlobalObject, raw_bytes: []const u8, comptime lifetime: Lifetime) bun.JSError!JSValue { - const bom, const buf = strings.BOM.detectAndSplit(raw_bytes); - if (buf.len == 0) return global.createSyntaxErrorInstance("Unexpected end of JSON input", .{}); - - if (bom == .utf16_le) { - var out = bun.String.createUTF16(bun.reinterpretSlice(u16, buf)); - defer if (lifetime == .temporary) bun.default_allocator.free(raw_bytes); - defer if (lifetime == .transfer) this.detach(); - defer out.deref(); - return out.toJSByParseJSON(global); - } - // null == unknown - // false == can't be - const could_be_all_ascii = this.is_all_ascii orelse this.store.?.is_all_ascii; - defer if (comptime lifetime == .temporary) bun.default_allocator.free(@constCast(buf)); - - if (could_be_all_ascii == null or !could_be_all_ascii.?) 
{ - var stack_fallback = std.heap.stackFallback(4096, bun.default_allocator); - const allocator = stack_fallback.get(); - // if toUTF16Alloc returns null, it means there are no non-ASCII characters - if (strings.toUTF16Alloc(allocator, buf, false, false) catch null) |external| { - if (comptime lifetime != .temporary) this.setIsASCIIFlag(false); - const result = ZigString.initUTF16(external).toJSONObject(global); - allocator.free(external); - return result; - } - - if (comptime lifetime != .temporary) this.setIsASCIIFlag(true); - } - - return ZigString.init(buf).toJSONObject(global); - } - - pub fn toFormDataWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime _: Lifetime) JSValue { - var encoder = this.getFormDataEncoding() orelse return { - return ZigString.init("Invalid encoding").toErrorInstance(global); - }; - defer encoder.deinit(); - - return bun.FormData.toJS(global, buf, encoder.encoding) catch |err| - global.createErrorInstance("FormData encoding failed: {s}", .{@errorName(err)}); - } - - pub fn toArrayBufferWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime lifetime: Lifetime) bun.JSError!JSValue { - return toArrayBufferViewWithBytes(this, global, buf, lifetime, .ArrayBuffer); - } - - pub fn toUint8ArrayWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime lifetime: Lifetime) bun.JSError!JSValue { - return toArrayBufferViewWithBytes(this, global, buf, lifetime, .Uint8Array); - } - - pub fn toArrayBufferViewWithBytes(this: *Blob, global: *JSGlobalObject, buf: []u8, comptime lifetime: Lifetime, comptime TypedArrayView: JSC.JSValue.JSType) bun.JSError!JSValue { - switch (comptime lifetime) { - .clone => { - if (TypedArrayView != .ArrayBuffer) { - // ArrayBuffer doesn't have this limit. - if (buf.len > JSC.synthetic_allocation_limit) { - this.detach(); - return global.throwOutOfMemory(); - } - } - - if (comptime Environment.isLinux) { - // If we can use a copy-on-write clone of the buffer, do so. 
- if (this.store) |store| { - if (store.data == .bytes) { - const allocated_slice = store.data.bytes.allocatedSlice(); - if (bun.isSliceInBuffer(buf, allocated_slice)) { - if (bun.linux.memfd_allocator.from(store.data.bytes.allocator)) |allocator| { - allocator.ref(); - defer allocator.deref(); - - const byteOffset = @as(usize, @intFromPtr(buf.ptr)) -| @as(usize, @intFromPtr(allocated_slice.ptr)); - const byteLength = buf.len; - - const result = JSC.ArrayBuffer.toArrayBufferFromSharedMemfd( - allocator.fd.cast(), - global, - byteOffset, - byteLength, - allocated_slice.len, - TypedArrayView, - ); - bloblog("toArrayBuffer COW clone({d}, {d}) = {d}", .{ byteOffset, byteLength, @intFromBool(result != .zero) }); - - if (result != .zero) { - return result; - } - } - } - } - } - } - return JSC.ArrayBuffer.create(global, buf, TypedArrayView); - }, - .share => { - if (buf.len > JSC.synthetic_allocation_limit and TypedArrayView != .ArrayBuffer) { - return global.throwOutOfMemory(); - } - - this.store.?.ref(); - return JSC.ArrayBuffer.fromBytes(buf, TypedArrayView).toJSWithContext( - global, - this.store.?, - JSC.BlobArrayBuffer_deallocator, - null, - ); - }, - .transfer => { - if (buf.len > JSC.synthetic_allocation_limit and TypedArrayView != .ArrayBuffer) { - this.detach(); - return global.throwOutOfMemory(); - } - - const store = this.store.?; - this.transfer(); - return JSC.ArrayBuffer.fromBytes(buf, TypedArrayView).toJSWithContext( - global, - store, - JSC.BlobArrayBuffer_deallocator, - null, - ); - }, - .temporary => { - if (buf.len > JSC.synthetic_allocation_limit and TypedArrayView != .ArrayBuffer) { - bun.default_allocator.free(buf); - return global.throwOutOfMemory(); - } - - return JSC.ArrayBuffer.fromBytes(buf, TypedArrayView).toJS( - global, - null, - ); - }, - } - } - - pub fn toArrayBuffer(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) bun.JSError!JSValue { - bloblog("toArrayBuffer", .{}); - return toArrayBufferView(this, global, lifetime, .ArrayBuffer); - } - - pub fn toUint8Array(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) bun.JSError!JSValue { - bloblog("toUin8Array", .{}); - return toArrayBufferView(this, global, lifetime, .Uint8Array); - } - - pub fn toArrayBufferView(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime, comptime TypedArrayView: JSC.JSValue.JSType) bun.JSError!JSValue { - const WithBytesFn = comptime if (TypedArrayView == .Uint8Array) - toUint8ArrayWithBytes - else - toArrayBufferWithBytes; - if (this.needsToReadFile()) { - return this.doReadFile(WithBytesFn, global); - } - - if (this.isS3()) { - return this.doReadFromS3(WithBytesFn, global); - } - - const view_ = this.sharedView(); - if (view_.len == 0) - return JSC.ArrayBuffer.create(global, "", TypedArrayView); - - return WithBytesFn(this, global, @constCast(view_), lifetime); - } - - pub fn toFormData(this: *Blob, global: *JSGlobalObject, comptime lifetime: Lifetime) JSValue { - if (this.needsToReadFile()) { - return this.doReadFile(toFormDataWithBytes, global); - } - if (this.isS3()) { - return this.doReadFromS3(toFormDataWithBytes, global); - } - - const view_ = this.sharedView(); - - if (view_.len == 0) - return JSC.DOMFormData.create(global); - - return toFormDataWithBytes(this, global, @constCast(view_), lifetime); - } - - const FromJsError = bun.JSError || error{InvalidArguments}; - - pub inline fn get( - global: *JSGlobalObject, - arg: JSValue, - comptime move: bool, - comptime require_array: bool, - ) FromJsError!Blob { - return 
fromJSMovable(global, arg, move, require_array); - } - - pub inline fn fromJSMove(global: *JSGlobalObject, arg: JSValue) FromJsError!Blob { - return fromJSWithoutDeferGC(global, arg, true, false); - } - - pub inline fn fromJSClone(global: *JSGlobalObject, arg: JSValue) FromJsError!Blob { - return fromJSWithoutDeferGC(global, arg, false, true); - } - - pub inline fn fromJSCloneOptionalArray(global: *JSGlobalObject, arg: JSValue) FromJsError!Blob { - return fromJSWithoutDeferGC(global, arg, false, false); - } - - fn fromJSMovable( - global: *JSGlobalObject, - arg: JSValue, - comptime move: bool, - comptime require_array: bool, - ) FromJsError!Blob { - const FromJSFunction = if (comptime move and !require_array) - fromJSMove - else if (!require_array) - fromJSCloneOptionalArray - else - fromJSClone; - - return FromJSFunction(global, arg); - } - - fn fromJSWithoutDeferGC( - global: *JSGlobalObject, - arg: JSValue, - comptime move: bool, - comptime require_array: bool, - ) FromJsError!Blob { - var current = arg; - if (current.isUndefinedOrNull()) { - return Blob{ .globalThis = global }; - } - - var top_value = current; - var might_only_be_one_thing = false; - arg.ensureStillAlive(); - defer arg.ensureStillAlive(); - var fail_if_top_value_is_not_typed_array_like = false; - switch (current.jsTypeLoose()) { - .Array, .DerivedArray => { - var top_iter = JSC.JSArrayIterator.init(current, global); - might_only_be_one_thing = top_iter.len == 1; - if (top_iter.len == 0) { - return Blob{ .globalThis = global }; - } - if (might_only_be_one_thing) { - top_value = top_iter.next().?; - } - }, - else => { - might_only_be_one_thing = true; - if (require_array) { - fail_if_top_value_is_not_typed_array_like = true; - } - }, - } - - if (might_only_be_one_thing or !move) { - - // Fast path: one item, we don't need to join - switch (top_value.jsTypeLoose()) { - .Cell, - .NumberObject, - JSC.JSValue.JSType.String, - JSC.JSValue.JSType.StringObject, - JSC.JSValue.JSType.DerivedStringObject, - => { - if (!fail_if_top_value_is_not_typed_array_like) { - var str = try top_value.toBunString(global); - defer str.deref(); - const bytes, const ascii = try str.toOwnedSliceReturningAllASCII(bun.default_allocator); - return Blob.initWithAllASCII(bytes, bun.default_allocator, global, ascii); - } - }, - - JSC.JSValue.JSType.ArrayBuffer, - JSC.JSValue.JSType.Int8Array, - JSC.JSValue.JSType.Uint8Array, - JSC.JSValue.JSType.Uint8ClampedArray, - JSC.JSValue.JSType.Int16Array, - JSC.JSValue.JSType.Uint16Array, - JSC.JSValue.JSType.Int32Array, - JSC.JSValue.JSType.Uint32Array, - JSC.JSValue.JSType.Float16Array, - JSC.JSValue.JSType.Float32Array, - JSC.JSValue.JSType.Float64Array, - JSC.JSValue.JSType.BigInt64Array, - JSC.JSValue.JSType.BigUint64Array, - JSC.JSValue.JSType.DataView, - => { - return try Blob.tryCreate(top_value.asArrayBuffer(global).?.byteSlice(), bun.default_allocator, global, false); - }, - - .DOMWrapper => { - if (!fail_if_top_value_is_not_typed_array_like) { - if (top_value.as(Blob)) |blob| { - if (comptime move) { - var _blob = blob.*; - _blob.allocator = null; - blob.transfer(); - return _blob; - } else { - return blob.dupe(); - } - } else if (top_value.as(JSC.API.BuildArtifact)) |build| { - if (comptime move) { - // I don't think this case should happen? 
- var blob = build.blob; - blob.transfer(); - return blob; - } else { - return build.blob.dupe(); - } - } else if (current.toSliceClone(global)) |sliced| { - if (sliced.allocator.get()) |allocator| { - return Blob.initWithAllASCII(@constCast(sliced.slice()), allocator, global, false); - } - } - } - }, - - else => {}, - } - - // new Blob("ok") - // new File("ok", "file.txt") - if (fail_if_top_value_is_not_typed_array_like) { - return error.InvalidArguments; - } - } - - var stack_allocator = std.heap.stackFallback(1024, bun.default_allocator); - const stack_mem_all = stack_allocator.get(); - var stack: std.ArrayList(JSValue) = std.ArrayList(JSValue).init(stack_mem_all); - var joiner = StringJoiner{ .allocator = stack_mem_all }; - var could_have_non_ascii = false; - - defer if (stack_allocator.fixed_buffer_allocator.end_index >= 1024) stack.deinit(); - - while (true) { - switch (current.jsTypeLoose()) { - .NumberObject, - JSC.JSValue.JSType.String, - JSC.JSValue.JSType.StringObject, - JSC.JSValue.JSType.DerivedStringObject, - => { - var sliced = try current.toSlice(global, bun.default_allocator); - const allocator = sliced.allocator.get(); - could_have_non_ascii = could_have_non_ascii or !sliced.allocator.isWTFAllocator(); - joiner.push(sliced.slice(), allocator); - }, - - .Array, .DerivedArray => { - var iter = JSC.JSArrayIterator.init(current, global); - try stack.ensureUnusedCapacity(iter.len); - var any_arrays = false; - while (iter.next()) |item| { - if (item.isUndefinedOrNull()) continue; - - // When it's a string or ArrayBuffer inside an array, we can avoid the extra push/pop - // we only really want this for nested arrays - // However, we must preserve the order - // That means if there are any arrays - // we have to restart the loop - if (!any_arrays) { - switch (item.jsTypeLoose()) { - .NumberObject, - .Cell, - .String, - .StringObject, - .DerivedStringObject, - => { - var sliced = try item.toSlice(global, bun.default_allocator); - const allocator = sliced.allocator.get(); - could_have_non_ascii = could_have_non_ascii or !sliced.allocator.isWTFAllocator(); - joiner.push(sliced.slice(), allocator); - continue; - }, - .ArrayBuffer, - .Int8Array, - .Uint8Array, - .Uint8ClampedArray, - .Int16Array, - .Uint16Array, - .Int32Array, - .Uint32Array, - .Float16Array, - .Float32Array, - .Float64Array, - .BigInt64Array, - .BigUint64Array, - .DataView, - => { - could_have_non_ascii = true; - var buf = item.asArrayBuffer(global).?; - joiner.pushStatic(buf.byteSlice()); - continue; - }, - .Array, .DerivedArray => { - any_arrays = true; - could_have_non_ascii = true; - break; - }, - - .DOMWrapper => { - if (item.as(Blob)) |blob| { - could_have_non_ascii = could_have_non_ascii or !(blob.is_all_ascii orelse false); - joiner.pushStatic(blob.sharedView()); - continue; - } else if (current.toSliceClone(global)) |sliced| { - const allocator = sliced.allocator.get(); - could_have_non_ascii = could_have_non_ascii or allocator != null; - joiner.push(sliced.slice(), allocator); - } - }, - else => {}, - } - } - - stack.appendAssumeCapacity(item); - } - }, - - .DOMWrapper => { - if (current.as(Blob)) |blob| { - could_have_non_ascii = could_have_non_ascii or !(blob.is_all_ascii orelse false); - joiner.pushStatic(blob.sharedView()); - } else if (current.toSliceClone(global)) |sliced| { - const allocator = sliced.allocator.get(); - could_have_non_ascii = could_have_non_ascii or allocator != null; - joiner.push(sliced.slice(), allocator); - } - }, - - .ArrayBuffer, - .Int8Array, - .Uint8Array, - 
.Uint8ClampedArray, - .Int16Array, - .Uint16Array, - .Int32Array, - .Uint32Array, - .Float16Array, - .Float32Array, - .Float64Array, - .BigInt64Array, - .BigUint64Array, - .DataView, - => { - var buf = current.asArrayBuffer(global).?; - joiner.pushStatic(buf.slice()); - could_have_non_ascii = true; - }, - - else => { - var sliced = try current.toSlice(global, bun.default_allocator); - if (global.hasException()) { - const end_result = try joiner.done(bun.default_allocator); - bun.default_allocator.free(end_result); - return error.JSError; - } - could_have_non_ascii = could_have_non_ascii or !sliced.allocator.isWTFAllocator(); - joiner.push(sliced.slice(), sliced.allocator.get()); - }, - } - current = stack.pop() orelse break; - } - - const joined = try joiner.done(bun.default_allocator); - - if (!could_have_non_ascii) { - return Blob.initWithAllASCII(joined, bun.default_allocator, global, true); - } - return Blob.init(joined, bun.default_allocator, global); - } -}; - -pub const AnyBlob = union(enum) { - Blob: Blob, - InternalBlob: InternalBlob, - WTFStringImpl: bun.WTF.StringImpl, - - pub fn fromOwnedSlice(allocator: std.mem.Allocator, bytes: []u8) AnyBlob { - return .{ .InternalBlob = .{ .bytes = .fromOwnedSlice(allocator, bytes) } }; - } - - pub fn fromArrayList(list: std.ArrayList(u8)) AnyBlob { - return .{ .InternalBlob = .{ .bytes = list } }; - } - - /// Assumed that AnyBlob itself is covered by the caller. - pub fn memoryCost(this: *const AnyBlob) usize { - return switch (this.*) { - .Blob => |*blob| if (blob.store) |blob_store| blob_store.memoryCost() else 0, - .WTFStringImpl => |str| if (str.refCount() == 1) str.memoryCost() else 0, - .InternalBlob => |*internal_blob| internal_blob.memoryCost(), - }; - } - - pub fn hasOneRef(this: *const AnyBlob) bool { - if (this.store()) |s| { - return s.hasOneRef(); - } - - return false; - } - - pub fn getFileName(this: *const AnyBlob) ?[]const u8 { - return switch (this.*) { - .Blob => this.Blob.getFileName(), - .WTFStringImpl => null, - .InternalBlob => null, - }; - } - - pub inline fn fastSize(this: *const AnyBlob) Blob.SizeType { - return switch (this.*) { - .Blob => this.Blob.size, - .WTFStringImpl => @truncate(this.WTFStringImpl.byteLength()), - .InternalBlob => @truncate(this.slice().len), - }; - } - - pub inline fn size(this: *const AnyBlob) Blob.SizeType { - return switch (this.*) { - .Blob => this.Blob.size, - .WTFStringImpl => @truncate(this.WTFStringImpl.utf8ByteLength()), - else => @truncate(this.slice().len), - }; - } - - pub fn hasContentTypeFromUser(this: AnyBlob) bool { - return switch (this) { - .Blob => this.Blob.hasContentTypeFromUser(), - .WTFStringImpl => false, - .InternalBlob => false, - }; - } - - fn toInternalBlobIfPossible(this: *AnyBlob) void { - if (this.* == .Blob) { - if (this.Blob.store) |s| { - if (s.data == .bytes and s.hasOneRef()) { - this.* = .{ .InternalBlob = s.data.bytes.toInternalBlob() }; - s.deref(); - return; - } - } - } - } - - pub fn toActionValue(this: *AnyBlob, globalThis: *JSGlobalObject, action: JSC.WebCore.BufferedReadableStreamAction) bun.JSError!JSC.JSValue { - if (action != .blob) { - this.toInternalBlobIfPossible(); - } - - switch (action) { - .text => { - if (this.* == .Blob) { - return this.toString(globalThis, .clone); - } - - return this.toStringTransfer(globalThis); - }, - .bytes => { - if (this.* == .Blob) { - return this.toArrayBufferView(globalThis, .clone, .Uint8Array); - } - - return this.toUint8ArrayTransfer(globalThis); - }, - .blob => { - const result = 
Blob.new(this.toBlob(globalThis)); - result.allocator = bun.default_allocator; - result.globalThis = globalThis; - return result.toJS(globalThis); - }, - .arrayBuffer => { - if (this.* == .Blob) { - return this.toArrayBufferView(globalThis, .clone, .ArrayBuffer); - } - - return this.toArrayBufferTransfer(globalThis); - }, - .json => { - return this.toJSON(globalThis, .share); - }, - } - } - - pub fn toPromise(this: *AnyBlob, globalThis: *JSGlobalObject, action: JSC.WebCore.BufferedReadableStreamAction) JSC.JSValue { - return JSC.JSPromise.wrap(globalThis, toActionValue, .{ this, globalThis, action }); - } - - pub fn wrap(this: *AnyBlob, promise: JSC.AnyPromise, globalThis: *JSGlobalObject, action: JSC.WebCore.BufferedReadableStreamAction) void { - promise.wrap(globalThis, toActionValue, .{ this, globalThis, action }); - } - - pub fn toJSON(this: *AnyBlob, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime) bun.JSError!JSValue { - switch (this.*) { - .Blob => return this.Blob.toJSON(global, lifetime), - // .InlineBlob => { - // if (this.InlineBlob.len == 0) { - // return JSValue.jsNull(); - // } - // var str = this.InlineBlob.toStringOwned(global); - // return str.parseJSON(global); - // }, - .InternalBlob => { - if (this.InternalBlob.bytes.items.len == 0) { - return JSValue.jsNull(); - } - - const str = this.InternalBlob.toJSON(global); - - // the GC will collect the string - this.* = .{ - .Blob = .{}, - }; - - return str; - }, - .WTFStringImpl => { - var str = bun.String.init(this.WTFStringImpl); - defer str.deref(); - this.* = .{ - .Blob = .{}, - }; - - if (str.length() == 0) { - return JSValue.jsNull(); - } - - return str.toJSByParseJSON(global); - }, - } - } - - pub fn toJSONShare(this: *AnyBlob, global: *JSGlobalObject) bun.JSError!JSValue { - return this.toJSON(global, .share); - } - - pub fn toStringTransfer(this: *AnyBlob, global: *JSGlobalObject) bun.JSError!JSValue { - return this.toString(global, .transfer); - } - - pub fn toUint8ArrayTransfer(this: *AnyBlob, global: *JSGlobalObject) bun.JSError!JSValue { - return this.toUint8Array(global, .transfer); - } - - pub fn toArrayBufferTransfer(this: *AnyBlob, global: *JSGlobalObject) bun.JSError!JSValue { - return this.toArrayBuffer(global, .transfer); - } - - pub fn toBlob(this: *AnyBlob, global: *JSGlobalObject) Blob { - if (this.size() == 0) { - return Blob.initEmpty(global); - } - - if (this.* == .Blob) { - return this.Blob.dupe(); - } - - if (this.* == .WTFStringImpl) { - const blob = Blob.create(this.slice(), bun.default_allocator, global, true); - this.* = .{ .Blob = .{} }; - return blob; - } - - const blob = Blob.init(this.InternalBlob.slice(), this.InternalBlob.bytes.allocator, global); - this.* = .{ .Blob = .{} }; - return blob; - } - - pub fn toString(this: *AnyBlob, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime) bun.JSError!JSValue { - switch (this.*) { - .Blob => return this.Blob.toString(global, lifetime), - // .InlineBlob => { - // if (this.InlineBlob.len == 0) { - // return ZigString.Empty.toValue(global); - // } - // const owned = this.InlineBlob.toStringOwned(global); - // this.* = .{ .InlineBlob = .{ .len = 0 } }; - // return owned; - // }, - .InternalBlob => { - if (this.InternalBlob.bytes.items.len == 0) { - return ZigString.Empty.toJS(global); - } - - const owned = this.InternalBlob.toStringOwned(global); - this.* = .{ .Blob = .{} }; - return owned; - }, - .WTFStringImpl => { - var str = bun.String.init(this.WTFStringImpl); - defer str.deref(); - this.* = .{ .Blob = .{} }; - - 
return str.toJS(global); - }, - } - } - - pub fn toArrayBuffer(this: *AnyBlob, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime) bun.JSError!JSValue { - return this.toArrayBufferView(global, lifetime, .ArrayBuffer); - } - - pub fn toUint8Array(this: *AnyBlob, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime) bun.JSError!JSValue { - return this.toArrayBufferView(global, lifetime, .Uint8Array); - } - - pub fn toArrayBufferView(this: *AnyBlob, global: *JSGlobalObject, comptime lifetime: JSC.WebCore.Lifetime, comptime TypedArrayView: JSC.JSValue.JSType) bun.JSError!JSValue { - switch (this.*) { - .Blob => return this.Blob.toArrayBufferView(global, lifetime, TypedArrayView), - // .InlineBlob => { - // if (this.InlineBlob.len == 0) { - // return JSC.ArrayBuffer.create(global, "", .ArrayBuffer); - // } - // var bytes = this.InlineBlob.sliceConst(); - // this.InlineBlob.len = 0; - // const value = JSC.ArrayBuffer.create( - // global, - // bytes, - // .ArrayBuffer, - // ); - // return value; - // }, - .InternalBlob => { - if (this.InternalBlob.bytes.items.len == 0) { - return JSC.ArrayBuffer.create(global, "", TypedArrayView); - } - - const bytes = this.InternalBlob.toOwnedSlice(); - this.* = .{ .Blob = .{} }; - - return JSC.ArrayBuffer.fromDefaultAllocator( - global, - bytes, - TypedArrayView, - ); - }, - .WTFStringImpl => { - const str = bun.String.init(this.WTFStringImpl); - this.* = .{ .Blob = .{} }; - defer str.deref(); - - const out_bytes = str.toUTF8WithoutRef(bun.default_allocator); - if (out_bytes.isAllocated()) { - return JSC.ArrayBuffer.fromDefaultAllocator( - global, - @constCast(out_bytes.slice()), - TypedArrayView, - ); - } - - return JSC.ArrayBuffer.create(global, out_bytes.slice(), TypedArrayView); - }, - } - } - - pub fn isDetached(this: *const AnyBlob) bool { - return switch (this.*) { - .Blob => |blob| blob.isDetached(), - .InternalBlob => this.InternalBlob.bytes.items.len == 0, - .WTFStringImpl => this.WTFStringImpl.length() == 0, - }; - } - - pub fn store(this: *const @This()) ?*Blob.Store { - if (this.* == .Blob) { - return this.Blob.store; - } - - return null; - } - - pub fn contentType(self: *const @This()) []const u8 { - return switch (self.*) { - .Blob => self.Blob.content_type, - .WTFStringImpl => MimeType.text.value, - // .InlineBlob => self.InlineBlob.contentType(), - .InternalBlob => self.InternalBlob.contentType(), - }; - } - - pub fn wasString(self: *const @This()) bool { - return switch (self.*) { - .Blob => self.Blob.is_all_ascii orelse false, - .WTFStringImpl => true, - // .InlineBlob => self.InlineBlob.was_string, - .InternalBlob => self.InternalBlob.was_string, - }; - } - - pub inline fn slice(self: *const @This()) []const u8 { - return switch (self.*) { - .Blob => self.Blob.sharedView(), - .WTFStringImpl => self.WTFStringImpl.utf8Slice(), - // .InlineBlob => self.InlineBlob.sliceConst(), - .InternalBlob => self.InternalBlob.sliceConst(), - }; - } - - pub fn needsToReadFile(self: *const @This()) bool { - return switch (self.*) { - .Blob => self.Blob.needsToReadFile(), - .WTFStringImpl, .InternalBlob => false, - }; - } - - pub fn isS3(self: *const @This()) bool { - return switch (self.*) { - .Blob => self.Blob.isS3(), - .WTFStringImpl, .InternalBlob => false, - }; - } - - pub fn detach(self: *@This()) void { - return switch (self.*) { - .Blob => { - self.Blob.detach(); - self.* = .{ - .Blob = .{}, - }; - }, - // .InlineBlob => { - // self.InlineBlob.len = 0; - // }, - .InternalBlob => { - self.InternalBlob.bytes.clearAndFree(); - 
self.* = .{ .Blob = .{} }; - }, - .WTFStringImpl => { - self.WTFStringImpl.deref(); - self.* = .{ .Blob = .{} }; - }, - }; - } -}; - -/// A single-use Blob -pub const InternalBlob = struct { - bytes: std.ArrayList(u8), - was_string: bool = false, - - pub fn memoryCost(this: *const @This()) usize { - return this.bytes.capacity; - } - - pub fn toStringOwned(this: *@This(), globalThis: *JSC.JSGlobalObject) JSValue { - const bytes_without_bom = strings.withoutUTF8BOM(this.bytes.items); - if (strings.toUTF16Alloc(globalThis.allocator(), bytes_without_bom, false, false) catch &[_]u16{}) |out| { - const return_value = ZigString.toExternalU16(out.ptr, out.len, globalThis); - return_value.ensureStillAlive(); - this.deinit(); - return return_value; - } else if - // If there was a UTF8 BOM, we clone it - (bytes_without_bom.len != this.bytes.items.len) { - defer this.deinit(); - var out = bun.String.createLatin1(this.bytes.items[3..]); - defer out.deref(); - return out.toJS(globalThis); - } else { - var str = ZigString.init(this.toOwnedSlice()); - str.mark(); - return str.toExternalValue(globalThis); - } - } - - pub fn toJSON(this: *@This(), globalThis: *JSC.JSGlobalObject) JSValue { - const str_bytes = ZigString.init(strings.withoutUTF8BOM(this.bytes.items)).withEncoding(); - const json = str_bytes.toJSONObject(globalThis); - this.deinit(); - return json; - } - - pub inline fn sliceConst(this: *const @This()) []const u8 { - return this.bytes.items; - } - - pub fn deinit(this: *@This()) void { - this.bytes.clearAndFree(); - } - - pub inline fn slice(this: @This()) []u8 { - return this.bytes.items; - } - - pub fn toOwnedSlice(this: *@This()) []u8 { - const bytes = this.bytes.items; - this.bytes.items = &.{}; - this.bytes.capacity = 0; - return bytes; - } - - pub fn clearAndFree(this: *@This()) void { - this.bytes.clearAndFree(); - } - - pub fn contentType(self: *const @This()) []const u8 { - if (self.was_string) { - return MimeType.text.value; - } - - return MimeType.other.value; - } -}; - -/// A blob which stores all the data in the same space as a real Blob -/// This is an optimization for small Response and Request bodies -/// It means that we can avoid an additional heap allocation for a small response -pub const InlineBlob = extern struct { - const real_blob_size = @sizeOf(Blob); - pub const IntSize = u8; - pub const available_bytes = real_blob_size - @sizeOf(IntSize) - 1 - 1; - bytes: [available_bytes]u8 align(1) = undefined, - len: IntSize align(1) = 0, - was_string: bool align(1) = false, - - pub fn concat(first: []const u8, second: []const u8) InlineBlob { - const total = first.len + second.len; - assert(total <= available_bytes); - - var inline_blob: JSC.WebCore.InlineBlob = .{}; - var bytes_slice = inline_blob.bytes[0..total]; - - if (first.len > 0) - @memcpy(bytes_slice[0..first.len], first); - - if (second.len > 0) - @memcpy(bytes_slice[first.len..][0..second.len], second); - - inline_blob.len = @as(@TypeOf(inline_blob.len), @truncate(total)); - return inline_blob; - } - - fn internalInit(data: []const u8, was_string: bool) InlineBlob { - assert(data.len <= available_bytes); - - var blob = InlineBlob{ - .len = @as(IntSize, @intCast(data.len)), - .was_string = was_string, - }; - - if (data.len > 0) - @memcpy(blob.bytes[0..data.len], data); - return blob; - } - - pub fn init(data: []const u8) InlineBlob { - return internalInit(data, false); - } - - pub fn initString(data: []const u8) InlineBlob { - return internalInit(data, true); - } - - pub fn toStringOwned(this: *@This(), globalThis: 
*JSC.JSGlobalObject) JSValue { - if (this.len == 0) - return ZigString.Empty.toJS(globalThis); - - var str = ZigString.init(this.sliceConst()); - - if (!strings.isAllASCII(this.sliceConst())) { - str.markUTF8(); - } - - const out = str.toJS(globalThis); - out.ensureStillAlive(); - this.len = 0; - return out; - } - - pub fn contentType(self: *const @This()) []const u8 { - if (self.was_string) { - return MimeType.text.value; - } - - return MimeType.other.value; - } - - pub fn deinit(_: *@This()) void {} - - pub inline fn slice(this: *@This()) []u8 { - return this.bytes[0..this.len]; - } - - pub inline fn sliceConst(this: *const @This()) []const u8 { - return this.bytes[0..this.len]; - } - - pub fn toOwnedSlice(this: *@This()) []u8 { - return this.slice(); - } - - pub fn clearAndFree(_: *@This()) void {} -}; - -const assert = bun.assert; - -pub export fn JSDOMFile__hasInstance(_: JSC.JSValue, _: *JSC.JSGlobalObject, value: JSC.JSValue) callconv(JSC.conv) bool { - JSC.markBinding(@src()); - const blob = value.as(Blob) orelse return false; - return blob.is_jsdom_file; -} diff --git a/src/bun.js/webcore/blob/Store.zig b/src/bun.js/webcore/blob/Store.zig new file mode 100644 index 0000000000..d447d2abe0 --- /dev/null +++ b/src/bun.js/webcore/blob/Store.zig @@ -0,0 +1,574 @@ +const Store = @This(); + +data: Data, + +mime_type: MimeType = .none, +ref_count: std.atomic.Value(u32) = .init(1), +is_all_ascii: ?bool = null, +allocator: std.mem.Allocator, + +pub const new = bun.TrivialNew(@This()); + +pub fn memoryCost(this: *const Store) usize { + return if (this.hasOneRef()) @sizeOf(@This()) + switch (this.data) { + .bytes => this.data.bytes.len, + .file => 0, + .s3 => |s3| s3.estimatedSize(), + } else 0; +} + +pub fn getPath(this: *const Store) ?[]const u8 { + return switch (this.data) { + .bytes => |*bytes| if (bytes.stored_name.len > 0) bytes.stored_name.slice() else null, + .file => |*file| if (file.pathlike == .path) file.pathlike.path.slice() else null, + .s3 => |*s3| s3.pathlike.slice(), + }; +} + +pub fn size(this: *const Store) SizeType { + return switch (this.data) { + .bytes => this.data.bytes.len, + .s3, .file => Blob.max_size, + }; +} + +pub const Map = std.HashMap(u64, *JSC.WebCore.Blob.Store, bun.IdentityContext(u64), 80); + +pub const Data = union(enum) { + bytes: Bytes, + file: File, + s3: S3, +}; + +pub fn ref(this: *Store) void { + const old = this.ref_count.fetchAdd(1, .monotonic); + assert(old > 0); +} + +pub fn hasOneRef(this: *const Store) bool { + return this.ref_count.load(.monotonic) == 1; +} + +/// Caller is responsible for derefing the Store. 
+pub fn toAnyBlob(this: *Store) ?Blob.Any { + if (this.hasOneRef()) { + if (this.data == .bytes) { + return .{ .InternalBlob = this.data.bytes.toInternalBlob() }; + } + } + + return null; +} + +pub fn external(ptr: ?*anyopaque, _: ?*anyopaque, _: usize) callconv(.C) void { + if (ptr == null) return; + var this = bun.cast(*Store, ptr); + this.deref(); +} +pub fn initS3WithReferencedCredentials(pathlike: node.PathLike, mime_type: ?MimeType, credentials: *bun.S3.S3Credentials, allocator: std.mem.Allocator) !*Store { + var path = pathlike; + // this actually protects/refs the pathlike + path.toThreadSafe(); + + const store = Blob.Store.new(.{ + .data = .{ + .s3 = S3.initWithReferencedCredentials( + path, + mime_type orelse brk: { + const sliced = path.slice(); + if (sliced.len > 0) { + var extname = std.fs.path.extension(sliced); + extname = std.mem.trim(u8, extname, "."); + if (MimeType.byExtensionNoDefault(extname)) |mime| { + break :brk mime; + } + } + break :brk null; + }, + credentials, + ), + }, + .allocator = allocator, + .ref_count = std.atomic.Value(u32).init(1), + }); + return store; +} + +pub fn initS3(pathlike: node.PathLike, mime_type: ?MimeType, credentials: bun.S3.S3Credentials, allocator: std.mem.Allocator) !*Store { + var path = pathlike; + // this actually protects/refs the pathlike + path.toThreadSafe(); + + const store = Blob.Store.new(.{ + .data = .{ + .s3 = S3.init( + path, + mime_type orelse brk: { + const sliced = path.slice(); + if (sliced.len > 0) { + var extname = std.fs.path.extension(sliced); + extname = std.mem.trim(u8, extname, "."); + if (MimeType.byExtensionNoDefault(extname)) |mime| { + break :brk mime; + } + } + break :brk null; + }, + credentials, + ), + }, + .allocator = allocator, + .ref_count = std.atomic.Value(u32).init(1), + }); + return store; +} +pub fn initFile(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?MimeType, allocator: std.mem.Allocator) !*Store { + const store = Blob.Store.new(.{ + .data = .{ + .file = File.init( + pathlike, + mime_type orelse brk: { + if (pathlike == .path) { + const sliced = pathlike.path.slice(); + if (sliced.len > 0) { + var extname = std.fs.path.extension(sliced); + extname = std.mem.trim(u8, extname, "."); + if (MimeType.byExtensionNoDefault(extname)) |mime| { + break :brk mime; + } + } + } + + break :brk null; + }, + ), + }, + .allocator = allocator, + .ref_count = std.atomic.Value(u32).init(1), + }); + return store; +} + +/// Takes ownership of `bytes`, which must have been allocated with `allocator`. 
+pub fn init(bytes: []u8, allocator: std.mem.Allocator) *Store { + const store = Blob.Store.new(.{ + .data = .{ + .bytes = Bytes.init(bytes, allocator), + }, + .allocator = allocator, + .ref_count = .init(1), + }); + return store; +} + +pub fn sharedView(this: Store) []u8 { + if (this.data == .bytes) + return this.data.bytes.slice(); + + return &[_]u8{}; +} + +pub fn deref(this: *Blob.Store) void { + const old = this.ref_count.fetchSub(1, .monotonic); + assert(old >= 1); + if (old == 1) { + this.deinit(); + } +} + +pub fn deinit(this: *Blob.Store) void { + const allocator = this.allocator; + + switch (this.data) { + .bytes => |*bytes| { + bytes.deinit(); + }, + .file => |file| { + if (file.pathlike == .path) { + if (file.pathlike.path == .string) { + allocator.free(@constCast(file.pathlike.path.slice())); + } else { + file.pathlike.path.deinit(); + } + } + }, + .s3 => |*s3| { + s3.deinit(allocator); + }, + } + + bun.destroy(this); +} + +pub const SerializeTag = enum(u8) { + file = 0, + bytes = 1, + empty = 2, +}; + +pub fn serialize(this: *Store, comptime Writer: type, writer: Writer) !void { + switch (this.data) { + .file => |file| { + const pathlike_tag: JSC.Node.PathOrFileDescriptor.SerializeTag = if (file.pathlike == .fd) .fd else .path; + try writer.writeInt(u8, @intFromEnum(pathlike_tag), .little); + + switch (file.pathlike) { + .fd => |fd| { + try writer.writeStruct(fd); + }, + .path => |path| { + const path_slice = path.slice(); + try writer.writeInt(u32, @as(u32, @truncate(path_slice.len)), .little); + try writer.writeAll(path_slice); + }, + } + }, + .s3 => |s3| { + const pathlike_tag: JSC.Node.PathOrFileDescriptor.SerializeTag = .path; + try writer.writeInt(u8, @intFromEnum(pathlike_tag), .little); + + const path_slice = s3.pathlike.slice(); + try writer.writeInt(u32, @as(u32, @truncate(path_slice.len)), .little); + try writer.writeAll(path_slice); + }, + .bytes => |bytes| { + const slice = bytes.slice(); + try writer.writeInt(u32, @truncate(slice.len), .little); + try writer.writeAll(slice); + + try writer.writeInt(u32, @truncate(bytes.stored_name.slice().len), .little); + try writer.writeAll(bytes.stored_name.slice()); + }, + } +} + +pub fn fromArrayList(list: std.ArrayListUnmanaged(u8), allocator: std.mem.Allocator) !*Blob.Store { + return try Blob.Store.init(list.items, allocator); +} + +/// A blob store that references a file on disk. 
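Store.serialize above writes a small tagged, length-prefixed format: file and S3 paths are a one-byte pathlike tag followed by a little-endian u32 length and the raw path bytes, while byte stores are a length-prefixed payload followed by a length-prefixed stored name. A sketch of the path record shape against any std.io writer (layout as read from the code above; the helper name is illustrative):

    const std = @import("std");

    /// Writes `tag`, then a little-endian u32 length, then the bytes themselves.
    fn writePathRecord(writer: anytype, tag: u8, path: []const u8) !void {
        try writer.writeInt(u8, tag, .little);
        try writer.writeInt(u32, @intCast(path.len), .little);
        try writer.writeAll(path);
    }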
+pub const File = struct { + pathlike: JSC.Node.PathOrFileDescriptor, + mime_type: MimeType = MimeType.other, + is_atty: ?bool = null, + mode: bun.Mode = 0, + seekable: ?bool = null, + max_size: SizeType = Blob.max_size, + // milliseconds since ECMAScript epoch + last_modified: JSC.JSTimeType = JSC.init_timestamp, + + pub fn unlink(this: *const File, globalThis: *JSGlobalObject) bun.JSError!JSValue { + return switch (this.pathlike) { + .path => |path_like| JSC.Node.fs.Async.unlink.create(globalThis, undefined, .{ + .path = .{ + .encoded_slice = switch (path_like) { + .encoded_slice => |slice| try slice.toOwned(bun.default_allocator), + else => try JSC.ZigString.init(path_like.slice()).toSliceClone(bun.default_allocator), + }, + }, + }, globalThis.bunVM()), + .fd => JSC.JSPromise.resolvedPromiseValue(globalThis, globalThis.createInvalidArgs("Is not possible to unlink a file descriptor", .{})), + }; + } + pub fn isSeekable(this: *const File) ?bool { + if (this.seekable) |seekable| { + return seekable; + } + + if (this.mode != 0) { + return bun.isRegularFile(this.mode); + } + + return null; + } + + pub fn init(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?MimeType) File { + return .{ .pathlike = pathlike, .mime_type = mime_type orelse MimeType.other }; + } +}; + +/// An S3 Blob Store +pub const S3 = struct { + pathlike: node.PathLike, + mime_type: MimeType = .other, + credentials: ?*S3Credentials, + options: bun.S3.MultiPartUploadOptions = .{}, + acl: ?bun.S3.ACL = null, + storage_class: ?bun.S3.StorageClass = null, + + pub fn isSeekable(_: *const @This()) ?bool { + return true; + } + + pub fn getCredentials(this: *const @This()) *S3Credentials { + bun.assert(this.credentials != null); + return this.credentials.?; + } + + pub fn getCredentialsWithOptions(this: *const @This(), options: ?JSValue, globalObject: *JSGlobalObject) bun.JSError!bun.S3.S3CredentialsWithOptions { + return S3Credentials.getCredentialsWithOptions(this.getCredentials().*, this.options, options, this.acl, this.storage_class, globalObject); + } + + pub fn path(this: *@This()) []const u8 { + var path_name = bun.URL.parse(this.pathlike.slice()).s3Path(); + // normalize start and ending + if (strings.endsWith(path_name, "/")) { + path_name = path_name[0..path_name.len]; + } else if (strings.endsWith(path_name, "\\")) { + path_name = path_name[0 .. 
path_name.len - 1]; + } + if (strings.startsWith(path_name, "/")) { + path_name = path_name[1..]; + } else if (strings.startsWith(path_name, "\\")) { + path_name = path_name[1..]; + } + return path_name; + } + + pub fn unlink(this: *@This(), store: *Store, globalThis: *JSGlobalObject, extra_options: ?JSValue) bun.JSError!JSValue { + const Wrapper = struct { + promise: JSC.JSPromise.Strong, + store: *Store, + global: *JSGlobalObject, + + pub const new = bun.TrivialNew(@This()); + + pub fn resolve(result: bun.S3.S3DeleteResult, opaque_self: *anyopaque) void { + const self: *@This() = @ptrCast(@alignCast(opaque_self)); + defer self.deinit(); + const globalObject = self.global; + switch (result) { + .success => { + self.promise.resolve(globalObject, .true); + }, + .not_found, .failure => |err| { + self.promise.reject(globalObject, err.toJS(globalObject, self.store.getPath())); + }, + } + } + + fn deinit(wrap: *@This()) void { + wrap.store.deref(); + wrap.promise.deinit(); + bun.destroy(wrap); + } + }; + const promise = JSC.JSPromise.Strong.init(globalThis); + const value = promise.value(); + const proxy_url = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy = if (proxy_url) |url| url.href else null; + var aws_options = try this.getCredentialsWithOptions(extra_options, globalThis); + defer aws_options.deinit(); + bun.S3.delete(&aws_options.credentials, this.path(), @ptrCast(&Wrapper.resolve), Wrapper.new(.{ + .promise = promise, + .store = store, // store is needed in case of not found error + .global = globalThis, + }), proxy); + store.ref(); + + return value; + } + + pub fn listObjects(this: *@This(), store: *Store, globalThis: *JSGlobalObject, listOptions: JSValue, extra_options: ?JSValue) bun.JSError!JSValue { + if (!listOptions.isEmptyOrUndefinedOrNull() and !listOptions.isObject()) { + return globalThis.throwInvalidArguments("S3Client.listObjects() needs a S3ListObjectsOption as it's first argument", .{}); + } + + const Wrapper = struct { + promise: JSC.JSPromise.Strong, + store: *Store, + resolvedlistOptions: bun.S3.S3ListObjectsOptions, + global: *JSGlobalObject, + + pub fn resolve(result: bun.S3.S3ListObjectsResult, opaque_self: *anyopaque) void { + const self: *@This() = @ptrCast(@alignCast(opaque_self)); + defer self.deinit(); + const globalObject = self.global; + + switch (result) { + .success => |list_result| { + defer list_result.deinit(); + self.promise.resolve(globalObject, list_result.toJS(globalObject)); + }, + + inline .not_found, .failure => |err| { + self.promise.reject(globalObject, err.toJS(globalObject, self.store.getPath())); + }, + } + } + + fn deinit(self: *@This()) void { + self.store.deref(); + self.promise.deinit(); + self.resolvedlistOptions.deinit(); + self.destroy(); + } + + pub inline fn destroy(self: *@This()) void { + bun.destroy(self); + } + }; + + const promise = JSC.JSPromise.Strong.init(globalThis); + const value = promise.value(); + const proxy_url = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); + const proxy = if (proxy_url) |url| url.href else null; + var aws_options = try this.getCredentialsWithOptions(extra_options, globalThis); + defer aws_options.deinit(); + + const options = bun.S3.getListObjectsOptionsFromJS(globalThis, listOptions) catch bun.outOfMemory(); + store.ref(); + + bun.S3.listObjects(&aws_options.credentials, options, @ptrCast(&Wrapper.resolve), bun.new(Wrapper, .{ + .promise = promise, + .store = store, // store is needed in case of not found error + .resolvedlistOptions = options, + .global = 
globalThis, + }), proxy); + + return value; + } + + pub fn initWithReferencedCredentials(pathlike: node.PathLike, mime_type: ?MimeType, credentials: *S3Credentials) S3 { + credentials.ref(); + return .{ + .credentials = credentials, + .pathlike = pathlike, + .mime_type = mime_type orelse MimeType.other, + }; + } + pub fn init(pathlike: node.PathLike, mime_type: ?MimeType, credentials: S3Credentials) S3 { + return .{ + .credentials = credentials.dupe(), + .pathlike = pathlike, + .mime_type = mime_type orelse MimeType.other, + }; + } + pub fn estimatedSize(this: *const @This()) usize { + return this.pathlike.estimatedSize() + if (this.credentials) |credentials| credentials.estimatedSize() else 0; + } + + pub fn deinit(this: *@This(), allocator: std.mem.Allocator) void { + if (this.pathlike == .string) { + allocator.free(@constCast(this.pathlike.slice())); + } else { + this.pathlike.deinit(); + } + this.pathlike = .{ + .string = bun.PathString.empty, + }; + if (this.credentials) |credentials| { + credentials.deref(); + this.credentials = null; + } + } + + const S3Credentials = bun.S3.S3Credentials; +}; + +pub const Bytes = struct { + ptr: ?[*]u8 = undefined, + len: SizeType = 0, + cap: SizeType = 0, + allocator: std.mem.Allocator, + + /// Used by standalone module graph and the File constructor + stored_name: bun.PathString = bun.PathString.empty, + + /// Takes ownership of `bytes`, which must have been allocated with + /// `allocator`. + pub fn init(bytes: []u8, allocator: std.mem.Allocator) Bytes { + return .{ + .ptr = bytes.ptr, + .len = @as(SizeType, @truncate(bytes.len)), + .cap = @as(SizeType, @truncate(bytes.len)), + .allocator = allocator, + }; + } + pub fn initEmptyWithName(name: bun.PathString, allocator: std.mem.Allocator) Bytes { + return .{ + .ptr = null, + .len = 0, + .cap = 0, + .allocator = allocator, + .stored_name = name, + }; + } + + pub fn fromArrayList(list: std.ArrayListUnmanaged(u8), allocator: std.mem.Allocator) !*Bytes { + return Bytes.init(list.items, allocator); + } + + pub fn toInternalBlob(this: *Bytes) Blob.Internal { + const ptr = this.ptr orelse return .{ + .bytes = std.ArrayList(u8){ + .items = &.{}, + .capacity = 0, + .allocator = this.allocator, + }, + }; + + const result: Blob.Internal = .{ + .bytes = .{ + .items = ptr[0..this.len], + .capacity = this.cap, + .allocator = this.allocator, + }, + }; + + this.allocator = bun.default_allocator; + this.len = 0; + this.cap = 0; + return result; + } + pub fn slice(this: Bytes) []u8 { + if (this.ptr) |ptr| { + return ptr[0..this.len]; + } + return ""; + } + + pub fn allocatedSlice(this: Bytes) []u8 { + if (this.ptr) |ptr| { + return ptr[0..this.cap]; + } + return ""; + } + + pub fn deinit(this: *Bytes) void { + bun.default_allocator.free(this.stored_name.slice()); + if (this.ptr) |ptr| { + this.allocator.free(ptr[0..this.cap]); + } + this.ptr = null; + this.len = 0; + this.cap = 0; + } + + pub fn asArrayList(this: Bytes) std.ArrayListUnmanaged(u8) { + return this.asArrayListLeak(); + } + + pub fn asArrayListLeak(this: Bytes) std.ArrayListUnmanaged(u8) { + return .{ + .items = this.ptr[0..this.len], + .capacity = this.cap, + }; + } +}; + +const std = @import("std"); +const bun = @import("bun"); +const strings = bun.strings; +const assert = bun.assert; +const MimeType = bun.http.MimeType; + +const JSC = bun.JSC; +const JSGlobalObject = JSC.JSGlobalObject; +const JSValue = JSC.JSValue; + +const node = bun.api.node; + +const webcore = bun.webcore; +const Blob = webcore.Blob; +const SizeType = Blob.SizeType; diff --git 
a/src/bun.js/webcore/blob/copy_file.zig b/src/bun.js/webcore/blob/copy_file.zig new file mode 100644 index 0000000000..236f95267e --- /dev/null +++ b/src/bun.js/webcore/blob/copy_file.zig @@ -0,0 +1,1163 @@ +// blocking, but off the main thread +pub const CopyFile = struct { + destination_file_store: Store.File, + source_file_store: Store.File, + store: ?*Store = null, + source_store: ?*Store = null, + offset: SizeType = 0, + size: SizeType = 0, + max_length: SizeType = Blob.max_size, + destination_fd: bun.FileDescriptor = bun.invalid_fd, + source_fd: bun.FileDescriptor = bun.invalid_fd, + + system_error: ?SystemError = null, + + read_len: SizeType = 0, + read_off: SizeType = 0, + + globalThis: *JSGlobalObject, + + mkdirp_if_not_exists: bool = false, + + pub const ResultType = anyerror!SizeType; + + pub const Callback = *const fn (ctx: *anyopaque, len: ResultType) void; + + pub fn create( + allocator: std.mem.Allocator, + store: *Store, + source_store: *Store, + off: SizeType, + max_len: SizeType, + globalThis: *JSGlobalObject, + mkdirp_if_not_exists: bool, + ) !*CopyFilePromiseTask { + const read_file = bun.new(CopyFile, CopyFile{ + .store = store, + .source_store = source_store, + .offset = off, + .max_length = max_len, + .globalThis = globalThis, + .destination_file_store = store.data.file, + .source_file_store = source_store.data.file, + .mkdirp_if_not_exists = mkdirp_if_not_exists, + }); + store.ref(); + source_store.ref(); + return CopyFilePromiseTask.createOnJSThread(allocator, globalThis, read_file) catch bun.outOfMemory(); + } + + const linux = std.os.linux; + const darwin = std.posix.system; + + pub fn deinit(this: *CopyFile) void { + if (this.source_file_store.pathlike == .path) { + if (this.source_file_store.pathlike.path == .string and this.system_error == null) { + bun.default_allocator.free(@constCast(this.source_file_store.pathlike.path.slice())); + } + } + this.store.?.deref(); + + bun.destroy(this); + } + + pub fn reject(this: *CopyFile, promise: *JSC.JSPromise) void { + const globalThis = this.globalThis; + var system_error: SystemError = this.system_error orelse SystemError{}; + if (this.source_file_store.pathlike == .path and system_error.path.isEmpty()) { + system_error.path = bun.String.createUTF8(this.source_file_store.pathlike.path.slice()); + } + + if (system_error.message.isEmpty()) { + system_error.message = bun.String.static("Failed to copy file"); + } + + const instance = system_error.toErrorInstance(this.globalThis); + if (this.store) |store| { + store.deref(); + } + promise.reject(globalThis, instance); + } + + pub fn then(this: *CopyFile, promise: *JSC.JSPromise) void { + this.source_store.?.deref(); + + if (this.system_error != null) { + this.reject(promise); + return; + } + + promise.resolve(this.globalThis, JSC.JSValue.jsNumberFromUint64(this.read_len)); + } + + pub fn run(this: *CopyFile) void { + this.runAsync(); + } + + pub fn doClose(this: *CopyFile) void { + const close_input = this.destination_file_store.pathlike != .fd and this.destination_fd != bun.invalid_fd; + const close_output = this.source_file_store.pathlike != .fd and this.source_fd != bun.invalid_fd; + + if (close_input and close_output) { + this.doCloseFile(.both); + } else if (close_input) { + this.doCloseFile(.destination); + } else if (close_output) { + this.doCloseFile(.source); + } + } + + const posix = std.posix; + + pub fn doCloseFile(this: *CopyFile, comptime which: IOWhich) void { + switch (which) { + .both => { + this.destination_fd.close(); + this.source_fd.close(); + }, + 
.destination => { + this.destination_fd.close(); + }, + .source => { + this.source_fd.close(); + }, + } + } + + const O = bun.O; + const open_destination_flags = O.CLOEXEC | O.CREAT | O.WRONLY | O.TRUNC; + const open_source_flags = O.CLOEXEC | O.RDONLY; + + pub fn doOpenFile(this: *CopyFile, comptime which: IOWhich) !void { + var path_buf1: bun.PathBuffer = undefined; + // open source file first + // if it fails, we don't want the extra destination file hanging out + if (which == .both or which == .source) { + this.source_fd = switch (bun.sys.open( + this.source_file_store.pathlike.path.sliceZ(&path_buf1), + open_source_flags, + 0, + )) { + .result => |result| switch (result.makeLibUVOwnedForSyscall(.open, .close_on_fail)) { + .result => |result_fd| result_fd, + .err => |errno| { + this.system_error = errno.toSystemError(); + return bun.errnoToZigErr(errno.errno); + }, + }, + .err => |errno| { + this.system_error = errno.toSystemError(); + return bun.errnoToZigErr(errno.errno); + }, + }; + } + + if (which == .both or which == .destination) { + while (true) { + const dest = this.destination_file_store.pathlike.path.sliceZ(&path_buf1); + this.destination_fd = switch (bun.sys.open( + dest, + open_destination_flags, + JSC.Node.fs.default_permission, + )) { + .result => |result| switch (result.makeLibUVOwnedForSyscall(.open, .close_on_fail)) { + .result => |result_fd| result_fd, + .err => |errno| { + this.system_error = errno.toSystemError(); + return bun.errnoToZigErr(errno.errno); + }, + }, + .err => |errno| { + switch (Blob.mkdirIfNotExists(this, errno, dest, dest)) { + .@"continue" => continue, + .fail => { + if (which == .both) { + this.source_fd.close(); + this.source_fd = .invalid; + } + return bun.errnoToZigErr(errno.errno); + }, + .no => {}, + } + + if (which == .both) { + this.source_fd.close(); + this.source_fd = .invalid; + } + + this.system_error = errno.withPath(this.destination_file_store.pathlike.path.slice()).toSystemError(); + return bun.errnoToZigErr(errno.errno); + }, + }; + break; + } + } + } + + const TryWith = enum { + sendfile, + copy_file_range, + splice, + + pub const tag = std.EnumMap(TryWith, bun.sys.Tag).init(.{ + .sendfile = .sendfile, + .copy_file_range = .copy_file_range, + .splice = .splice, + }); + }; + + pub fn doCopyFileRange( + this: *CopyFile, + comptime use: TryWith, + comptime clear_append_if_invalid: bool, + ) anyerror!void { + this.read_off += this.offset; + + var remain = @as(usize, this.max_length); + const unknown_size = remain == Blob.max_size or remain == 0; + if (unknown_size) { + // sometimes stat lies + // let's give it 4096 and see how it goes + remain = 4096; + } + + var total_written: usize = 0; + const src_fd = this.source_fd; + const dest_fd = this.destination_fd; + + defer { + this.read_len = @as(SizeType, @truncate(total_written)); + } + + var has_unset_append = false; + + // If they can't use copy_file_range, they probably also can't + // use sendfile() or splice() + if (!bun.canUseCopyFileRangeSyscall()) { + switch (JSC.Node.fs.NodeFS.copyFileUsingReadWriteLoop("", "", src_fd, dest_fd, if (unknown_size) 0 else remain, &total_written)) { + .err => |err| { + this.system_error = err.toSystemError(); + return bun.errnoToZigErr(err.errno); + }, + .result => { + _ = linux.ftruncate(dest_fd.cast(), @as(std.posix.off_t, @intCast(total_written))); + return; + }, + } + } + + while (true) { + // TODO: this should use non-blocking I/O. 
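+            // `use` is chosen at comptime (copy_file_range, sendfile, or splice);
+            // NOSYS/XDEV/INVAL results below fall back to a plain read/write loop.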
+            const written = switch (comptime use) {
+                .copy_file_range => linux.copy_file_range(src_fd.cast(), null, dest_fd.cast(), null, remain, 0),
+                .sendfile => linux.sendfile(dest_fd.cast(), src_fd.cast(), null, remain),
+                .splice => bun.linux.splice(src_fd.cast(), null, dest_fd.cast(), null, remain, 0),
+            };
+
+            switch (bun.sys.getErrno(written)) {
+                .SUCCESS => {},
+
+                .NOSYS, .XDEV => {
+                    // TODO: this should use non-blocking I/O.
+                    switch (JSC.Node.fs.NodeFS.copyFileUsingReadWriteLoop("", "", src_fd, dest_fd, if (unknown_size) 0 else remain, &total_written)) {
+                        .err => |err| {
+                            this.system_error = err.toSystemError();
+                            return bun.errnoToZigErr(err.errno);
+                        },
+                        .result => {
+                            _ = linux.ftruncate(dest_fd.cast(), @as(std.posix.off_t, @intCast(total_written)));
+                            return;
+                        },
+                    }
+                },
+
+                .INVAL => {
+                    if (comptime clear_append_if_invalid) {
+                        if (!has_unset_append) {
+                            // https://kylelaker.com/2018/08/31/stdout-oappend.html
+                            // make() can set STDOUT / STDERR to O_APPEND
+                            // this messes up sendfile()
+                            has_unset_append = true;
+                            const flags = linux.fcntl(dest_fd.cast(), linux.F.GETFL, @as(c_int, 0));
+                            if ((flags & O.APPEND) != 0) {
+                                _ = linux.fcntl(dest_fd.cast(), linux.F.SETFL, flags ^ O.APPEND);
+                                continue;
+                            }
+                        }
+                    }
+
+                    // If the Linux machine doesn't support
+                    // copy_file_range or the file descriptor is
+                    // incompatible with the chosen syscall, fall back
+                    // to a read/write loop
+                    if (total_written == 0) {
+                        // TODO: this should use non-blocking I/O.
+                        switch (JSC.Node.fs.NodeFS.copyFileUsingReadWriteLoop("", "", src_fd, dest_fd, if (unknown_size) 0 else remain, &total_written)) {
+                            .err => |err| {
+                                this.system_error = err.toSystemError();
+                                return bun.errnoToZigErr(err.errno);
+                            },
+                            .result => {
+                                _ = linux.ftruncate(dest_fd.cast(), @as(std.posix.off_t, @intCast(total_written)));
+                                return;
+                            },
+                        }
+                    }
+
+                    this.system_error = (bun.sys.Error{
+                        .errno = @as(bun.sys.Error.Int, @intCast(@intFromEnum(linux.E.INVAL))),
+                        .syscall = TryWith.tag.get(use).?,
+                    }).toSystemError();
+                    return bun.errnoToZigErr(linux.E.INVAL);
+                },
+                else => |errno| {
+                    this.system_error = (bun.sys.Error{
+                        .errno = @as(bun.sys.Error.Int, @intCast(@intFromEnum(errno))),
+                        .syscall = TryWith.tag.get(use).?,
+                    }).toSystemError();
+                    return bun.errnoToZigErr(errno);
+                },
+            }
+
+            // wrote zero bytes means EOF
+            remain -|= @intCast(written);
+            total_written += @intCast(written);
+            if (written == 0 or remain == 0) break;
+        }
+    }
+
+    pub fn doFCopyFileWithReadWriteLoopFallback(this: *CopyFile) anyerror!void {
+        switch (bun.sys.fcopyfile(this.source_fd, this.destination_fd, posix.system.COPYFILE{ .DATA = true })) {
+            .err => |errno| {
+                switch (errno.getErrno()) {
+                    // If the file type doesn't support seeking, it may return EBADF
+                    // Example case:
+                    //
+                    // bun test bun-write.test | xargs echo
+                    //
+                    .BADF => {
+                        var total_written: u64 = 0;
+
+                        // TODO: this should use non-blocking I/O.
+ switch (JSC.Node.fs.NodeFS.copyFileUsingReadWriteLoop("", "", this.source_fd, this.destination_fd, 0, &total_written)) { + .err => |err| { + this.system_error = err.toSystemError(); + return bun.errnoToZigErr(err.errno); + }, + .result => {}, + } + }, + else => { + this.system_error = errno.toSystemError(); + + return bun.errnoToZigErr(errno.errno); + }, + } + }, + .result => {}, + } + } + + pub fn doClonefile(this: *CopyFile) anyerror!void { + var source_buf: bun.PathBuffer = undefined; + var dest_buf: bun.PathBuffer = undefined; + + while (true) { + const dest = this.destination_file_store.pathlike.path.sliceZ( + &dest_buf, + ); + switch (bun.sys.clonefile( + this.source_file_store.pathlike.path.sliceZ(&source_buf), + dest, + )) { + .err => |errno| { + switch (Blob.mkdirIfNotExists(this, errno, dest, this.destination_file_store.pathlike.path.slice())) { + .@"continue" => continue, + .fail => {}, + .no => {}, + } + this.system_error = errno.toSystemError(); + return bun.errnoToZigErr(errno.errno); + }, + .result => {}, + } + break; + } + } + + pub fn runAsync(this: *CopyFile) void { + if (Environment.isWindows) return; //why + // defer task.onFinish(); + + var stat_: ?bun.Stat = null; + + if (this.destination_file_store.pathlike == .fd) { + this.destination_fd = this.destination_file_store.pathlike.fd; + } + + if (this.source_file_store.pathlike == .fd) { + this.source_fd = this.source_file_store.pathlike.fd; + } + + // Do we need to open both files? + if (this.destination_fd == bun.invalid_fd and this.source_fd == bun.invalid_fd) { + + // First, we attempt to clonefile() on macOS + // This is the fastest way to copy a file. + if (comptime Environment.isMac) { + if (this.offset == 0 and this.source_file_store.pathlike == .path and this.destination_file_store.pathlike == .path) { + do_clonefile: { + var path_buf: bun.PathBuffer = undefined; + + // stat the output file, make sure it: + // 1. Exists + switch (bun.sys.stat(this.source_file_store.pathlike.path.sliceZ(&path_buf))) { + .result => |result| { + stat_ = result; + + if (posix.S.ISDIR(result.mode)) { + this.system_error = unsupported_directory_error; + return; + } + + if (!posix.S.ISREG(result.mode)) + break :do_clonefile; + }, + .err => |err| { + // If we can't stat it, we also can't copy it. + this.system_error = err.toSystemError(); + return; + }, + } + + if (this.doClonefile()) { + if (this.max_length != Blob.max_size and this.max_length < @as(SizeType, @intCast(stat_.?.size))) { + // If this fails...well, there's not much we can do about it. + _ = bun.c.truncate( + this.destination_file_store.pathlike.path.sliceZ(&path_buf), + @as(std.posix.off_t, @intCast(this.max_length)), + ); + this.read_len = @as(SizeType, @intCast(this.max_length)); + } else { + this.read_len = @as(SizeType, @intCast(stat_.?.size)); + } + return; + } else |_| { + + // this may still fail, in which case we just continue trying with fcopyfile + // it can fail when the input file already exists + // or if the output is not a directory + // or if it's a network volume + this.system_error = null; + } + } + } + } + + this.doOpenFile(.both) catch return; + // Do we need to open only one file? + } else if (this.destination_fd == bun.invalid_fd) { + this.source_fd = this.source_file_store.pathlike.fd; + + this.doOpenFile(.destination) catch return; + // Do we need to open only one file? 
+ } else if (this.source_fd == bun.invalid_fd) { + this.destination_fd = this.destination_file_store.pathlike.fd; + + this.doOpenFile(.source) catch return; + } + + if (this.system_error != null) { + return; + } + + assert(this.destination_fd.isValid()); + assert(this.source_fd.isValid()); + + if (this.destination_file_store.pathlike == .fd) {} + + const stat: bun.Stat = stat_ orelse switch (bun.sys.fstat(this.source_fd)) { + .result => |result| result, + .err => |err| { + this.doClose(); + this.system_error = err.toSystemError(); + return; + }, + }; + + if (posix.S.ISDIR(stat.mode)) { + this.system_error = unsupported_directory_error; + this.doClose(); + return; + } + + if (stat.size != 0) { + this.max_length = @max(@min(@as(SizeType, @intCast(stat.size)), this.max_length), this.offset) - this.offset; + if (this.max_length == 0) { + this.doClose(); + return; + } + + if (bun.sys.preallocate_supported and + posix.S.ISREG(stat.mode) and + this.max_length > bun.sys.preallocate_length and + this.max_length != Blob.max_size) + { + bun.sys.preallocate_file(this.destination_fd.cast(), 0, this.max_length) catch {}; + } + } + + if (comptime Environment.isLinux) { + + // Bun.write(Bun.file("a"), Bun.file("b")) + if (posix.S.ISREG(stat.mode) and (posix.S.ISREG(this.destination_file_store.mode) or this.destination_file_store.mode == 0)) { + if (this.destination_file_store.is_atty orelse false) { + this.doCopyFileRange(.copy_file_range, true) catch {}; + } else { + this.doCopyFileRange(.copy_file_range, false) catch {}; + } + + this.doClose(); + return; + } + + // $ bun run foo.js | bun run bar.js + if (posix.S.ISFIFO(stat.mode) and posix.S.ISFIFO(this.destination_file_store.mode)) { + if (this.destination_file_store.is_atty orelse false) { + this.doCopyFileRange(.splice, true) catch {}; + } else { + this.doCopyFileRange(.splice, false) catch {}; + } + + this.doClose(); + return; + } + + if (posix.S.ISREG(stat.mode) or posix.S.ISCHR(stat.mode) or posix.S.ISSOCK(stat.mode)) { + if (this.destination_file_store.is_atty orelse false) { + this.doCopyFileRange(.sendfile, true) catch {}; + } else { + this.doCopyFileRange(.sendfile, false) catch {}; + } + + this.doClose(); + return; + } + + this.system_error = unsupported_non_regular_file_error; + this.doClose(); + return; + } + + if (comptime Environment.isMac) { + this.doFCopyFileWithReadWriteLoopFallback() catch { + this.doClose(); + + return; + }; + if (stat.size != 0 and @as(SizeType, @intCast(stat.size)) > this.max_length) { + _ = darwin.ftruncate(this.destination_fd.cast(), @as(std.posix.off_t, @intCast(this.max_length))); + } + + this.doClose(); + } else { + @compileError("TODO: implement copyfile"); + } + } +}; + +pub const CopyFileWindows = struct { + destination_file_store: *Store, + source_file_store: *Store, + + io_request: libuv.fs_t = std.mem.zeroes(libuv.fs_t), + promise: JSC.JSPromise.Strong = .{}, + mkdirp_if_not_exists: bool = false, + event_loop: *JSC.EventLoop, + + size: Blob.SizeType = Blob.max_size, + + /// For mkdirp + err: ?bun.sys.Error = null, + + /// When we are unable to get the original file path, we do a read-write loop that uses libuv. 
+ read_write_loop: ReadWriteLoop = .{}, + + pub const ReadWriteLoop = struct { + source_fd: bun.FileDescriptor = bun.invalid_fd, + must_close_source_fd: bool = false, + destination_fd: bun.FileDescriptor = bun.invalid_fd, + must_close_destination_fd: bool = false, + written: usize = 0, + read_buf: std.ArrayList(u8) = std.ArrayList(u8).init(bun.default_allocator), + uv_buf: libuv.uv_buf_t = .{ .base = undefined, .len = 0 }, + + pub fn start(read_write_loop: *ReadWriteLoop, this: *CopyFileWindows) JSC.Maybe(void) { + read_write_loop.read_buf.ensureTotalCapacityPrecise(64 * 1024) catch bun.outOfMemory(); + + return read(read_write_loop, this); + } + + pub fn read(read_write_loop: *ReadWriteLoop, this: *CopyFileWindows) JSC.Maybe(void) { + read_write_loop.read_buf.items.len = 0; + read_write_loop.uv_buf = libuv.uv_buf_t.init(read_write_loop.read_buf.allocatedSlice()); + const loop = this.event_loop.virtual_machine.event_loop_handle.?; + + // This io_request is used for both reading and writing. + // For now, we don't start reading the next chunk until + // we've finished writing all the previous chunks. + this.io_request.data = @ptrCast(this); + + const rc = libuv.uv_fs_read( + loop, + &this.io_request, + read_write_loop.source_fd.uv(), + @ptrCast(&read_write_loop.uv_buf), + 1, + -1, + &onRead, + ); + + if (rc.toError(.read)) |err| { + return .{ .err = err }; + } + + return .{ .result = {} }; + } + + fn onRead(req: *libuv.fs_t) callconv(.C) void { + var this: *CopyFileWindows = @fieldParentPtr("io_request", req); + bun.assert(req.data == @as(?*anyopaque, @ptrCast(this))); + + const source_fd = this.read_write_loop.source_fd; + const destination_fd = this.read_write_loop.destination_fd; + const read_buf = &this.read_write_loop.read_buf.items; + + const event_loop = this.event_loop; + + const rc = req.result; + + bun.sys.syslog("uv_fs_read({}, {d}) = {d}", .{ source_fd, read_buf.len, rc.int() }); + if (rc.toError(.read)) |err| { + this.err = err; + this.onReadWriteLoopComplete(); + return; + } + + read_buf.len = @intCast(rc.int()); + this.read_write_loop.uv_buf = libuv.uv_buf_t.init(read_buf.*); + + if (rc.int() == 0) { + // Handle EOF. We can't read any more. + this.onReadWriteLoopComplete(); + return; + } + + // Re-use the fs request. + req.deinit(); + const rc2 = libuv.uv_fs_write( + event_loop.virtual_machine.event_loop_handle.?, + &this.io_request, + destination_fd.uv(), + @ptrCast(&this.read_write_loop.uv_buf), + 1, + -1, + &onWrite, + ); + req.data = @ptrCast(this); + + if (rc2.toError(.write)) |err| { + this.err = err; + this.onReadWriteLoopComplete(); + return; + } + } + + fn onWrite(req: *libuv.fs_t) callconv(.C) void { + var this: *CopyFileWindows = @fieldParentPtr("io_request", req); + bun.assert(req.data == @as(?*anyopaque, @ptrCast(this))); + const buf = &this.read_write_loop.read_buf.items; + + const destination_fd = this.read_write_loop.destination_fd; + + const rc = req.result; + + bun.sys.syslog("uv_fs_write({}, {d}) = {d}", .{ destination_fd, buf.len, rc.int() }); + + if (rc.toError(.write)) |err| { + this.err = err; + this.onReadWriteLoopComplete(); + return; + } + + const wrote: u32 = @intCast(rc.int()); + + this.read_write_loop.written += wrote; + + if (wrote < buf.len) { + if (wrote == 0) { + // Handle EOF. We can't write any more. + this.onReadWriteLoopComplete(); + return; + } + + // Re-use the fs request. 
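+                // Short write: clean up the finished request and resubmit the
+                // remaining bytes with the same fs_t before reading the next chunk.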
+ req.deinit(); + req.data = @ptrCast(this); + + this.read_write_loop.uv_buf = libuv.uv_buf_t.init(this.read_write_loop.uv_buf.slice()[wrote..]); + const rc2 = libuv.uv_fs_write( + this.event_loop.virtual_machine.event_loop_handle.?, + &this.io_request, + destination_fd.uv(), + @ptrCast(&this.read_write_loop.uv_buf), + 1, + -1, + &onWrite, + ); + + if (rc2.toError(.write)) |err| { + this.err = err; + this.onReadWriteLoopComplete(); + return; + } + + return; + } + + req.deinit(); + switch (this.read_write_loop.read(this)) { + .err => |err| { + this.err = err; + this.onReadWriteLoopComplete(); + }, + .result => {}, + } + } + + pub fn close(this: *ReadWriteLoop) void { + if (this.must_close_source_fd) { + if (this.source_fd.makeLibUVOwned()) |fd| { + bun.Async.Closer.close( + fd, + bun.Async.Loop.get(), + ); + } else |_| { + this.source_fd.close(); + } + this.must_close_source_fd = false; + this.source_fd = bun.invalid_fd; + } + + if (this.must_close_destination_fd) { + if (this.destination_fd.makeLibUVOwned()) |fd| { + bun.Async.Closer.close( + fd, + bun.Async.Loop.get(), + ); + } else |_| { + this.destination_fd.close(); + } + this.must_close_destination_fd = false; + this.destination_fd = bun.invalid_fd; + } + + this.read_buf.clearAndFree(); + } + }; + + pub fn onReadWriteLoopComplete(this: *CopyFileWindows) void { + this.event_loop.unrefConcurrently(); + + if (this.err) |err| { + this.err = null; + this.throw(err); + return; + } + + this.onComplete(this.read_write_loop.written); + } + + pub const new = bun.TrivialNew(@This()); + + pub fn init( + destination_file_store: *Store, + source_file_store: *Store, + event_loop: *JSC.EventLoop, + mkdirp_if_not_exists: bool, + size_: Blob.SizeType, + ) JSC.JSValue { + destination_file_store.ref(); + source_file_store.ref(); + const result = CopyFileWindows.new(.{ + .destination_file_store = destination_file_store, + .source_file_store = source_file_store, + .promise = JSC.JSPromise.Strong.init(event_loop.global), + .io_request = std.mem.zeroes(libuv.fs_t), + .event_loop = event_loop, + .mkdirp_if_not_exists = mkdirp_if_not_exists, + .size = size_, + }); + const promise = result.promise.value(); + + // On error, this function might free the CopyFileWindows struct. + // So we can no longer reference it beyond this point. + result.copyfile(); + + return promise; + } + + fn preparePathlike(pathlike: *JSC.Node.PathOrFileDescriptor, must_close: *bool, is_reading: bool) JSC.Maybe(bun.FileDescriptor) { + if (pathlike.* == .path) { + const fd = switch (bun.sys.openatWindowsT( + u8, + bun.invalid_fd, + pathlike.path.slice(), + if (is_reading) + bun.O.RDONLY + else + bun.O.WRONLY | bun.O.CREAT, + 0, + )) { + .result => |result| result.makeLibUVOwned() catch { + result.close(); + return .{ + .err = .{ + .errno = @as(c_int, @intCast(@intFromEnum(bun.sys.SystemErrno.EMFILE))), + .syscall = .open, + .path = pathlike.path.slice(), + }, + }; + }, + .err => |err| { + return .{ + .err = err, + }; + }, + }; + must_close.* = true; + return .{ .result = fd }; + } else { + // We assume that this is already a uv-casted file descriptor. + return .{ .result = pathlike.fd }; + } + } + + fn prepareReadWriteLoop(this: *CopyFileWindows) void { + // Open the destination first, so that if we need to call + // mkdirp(), we don't spend extra time opening the file handle for + // the source. 
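+        // If the destination's parent directory is missing (ENOENT) and
+        // mkdirp_if_not_exists is set, mkdirp() creates it and re-runs copyfile().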
+ this.read_write_loop.destination_fd = switch (preparePathlike(&this.destination_file_store.data.file.pathlike, &this.read_write_loop.must_close_destination_fd, false)) { + .result => |fd| fd, + .err => |err| { + if (this.mkdirp_if_not_exists and err.getErrno() == .NOENT) { + this.mkdirp(); + return; + } + + this.throw(err); + return; + }, + }; + + this.read_write_loop.source_fd = switch (preparePathlike(&this.source_file_store.data.file.pathlike, &this.read_write_loop.must_close_source_fd, true)) { + .result => |fd| fd, + .err => |err| { + this.throw(err); + return; + }, + }; + + switch (this.read_write_loop.start(this)) { + .err => |err| { + this.throw(err); + return; + }, + .result => { + this.event_loop.refConcurrently(); + }, + } + } + + fn copyfile(this: *CopyFileWindows) void { + // This is for making it easier for us to test this code path + if (bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_UV_FS_COPYFILE")) { + this.prepareReadWriteLoop(); + return; + } + + var pathbuf1: bun.PathBuffer = undefined; + var pathbuf2: bun.PathBuffer = undefined; + var destination_file_store = &this.destination_file_store.data.file; + var source_file_store = &this.source_file_store.data.file; + + const new_path: [:0]const u8 = brk: { + switch (destination_file_store.pathlike) { + .path => { + break :brk destination_file_store.pathlike.path.sliceZ(&pathbuf1); + }, + .fd => |fd| { + switch (bun.sys.File.from(fd).kind()) { + .err => |err| { + this.throw(err); + return; + }, + .result => |kind| { + switch (kind) { + .directory => { + this.throw(bun.sys.Error.fromCode(.ISDIR, .open)); + return; + }, + .character_device => { + this.prepareReadWriteLoop(); + return; + }, + else => { + const out = bun.getFdPath(fd, &pathbuf1) catch { + // This case can happen when either: + // - NUL device + // - Pipe. `cat foo.txt | bun bar.ts` + this.prepareReadWriteLoop(); + return; + }; + pathbuf1[out.len] = 0; + break :brk pathbuf1[0..out.len :0]; + }, + } + }, + } + }, + } + }; + const old_path: [:0]const u8 = brk: { + switch (source_file_store.pathlike) { + .path => { + break :brk source_file_store.pathlike.path.sliceZ(&pathbuf2); + }, + .fd => |fd| { + switch (bun.sys.File.from(fd).kind()) { + .err => |err| { + this.throw(err); + return; + }, + .result => |kind| { + switch (kind) { + .directory => { + this.throw(bun.sys.Error.fromCode(.ISDIR, .open)); + return; + }, + .character_device => { + this.prepareReadWriteLoop(); + return; + }, + else => { + const out = bun.getFdPath(fd, &pathbuf2) catch { + // This case can happen when either: + // - NUL device + // - Pipe. 
`cat foo.txt | bun bar.ts` + this.prepareReadWriteLoop(); + return; + }; + pathbuf2[out.len] = 0; + break :brk pathbuf2[0..out.len :0]; + }, + } + }, + } + }, + } + }; + const loop = this.event_loop.virtual_machine.event_loop_handle.?; + this.io_request.data = @ptrCast(this); + + const rc = libuv.uv_fs_copyfile( + loop, + &this.io_request, + old_path, + new_path, + 0, + &onCopyFile, + ); + + if (rc.errno()) |errno| { + this.throw(.{ + // #6336 + .errno = if (errno == @intFromEnum(bun.sys.SystemErrno.EPERM)) + @as(c_int, @intCast(@intFromEnum(bun.sys.SystemErrno.ENOENT))) + else + errno, + .syscall = .copyfile, + .path = old_path, + }); + return; + } + this.event_loop.refConcurrently(); + } + + pub fn throw(this: *CopyFileWindows, err: bun.sys.Error) void { + const globalThis = this.event_loop.global; + const promise = this.promise.swap(); + const err_instance = err.toJSC(globalThis); + var event_loop = this.event_loop; + event_loop.enter(); + defer event_loop.exit(); + this.deinit(); + promise.reject(globalThis, err_instance); + } + + fn onCopyFile(req: *libuv.fs_t) callconv(.C) void { + var this: *CopyFileWindows = @fieldParentPtr("io_request", req); + bun.assert(req.data == @as(?*anyopaque, @ptrCast(this))); + + var event_loop = this.event_loop; + event_loop.unrefConcurrently(); + const rc = req.result; + + bun.sys.syslog("uv_fs_copyfile() = {}", .{rc}); + if (rc.errEnum()) |errno| { + if (this.mkdirp_if_not_exists and errno == .NOENT) { + req.deinit(); + this.mkdirp(); + return; + } else { + var err = bun.sys.Error.fromCode( + // #6336 + if (errno == .PERM) .NOENT else errno, + + .copyfile, + ); + const destination = &this.destination_file_store.data.file; + + // we don't really know which one it is + if (destination.pathlike == .path) { + err = err.withPath(destination.pathlike.path.slice()); + } else if (destination.pathlike == .fd) { + err = err.withFd(destination.pathlike.fd); + } + + this.throw(err); + } + return; + } + + this.onComplete(req.statbuf.size); + } + + pub fn onComplete(this: *CopyFileWindows, written_actual: usize) void { + var written = written_actual; + if (written != @as(@TypeOf(written), @intCast(this.size)) and this.size != Blob.max_size) { + this.truncate(); + written = @intCast(this.size); + } + const globalThis = this.event_loop.global; + const promise = this.promise.swap(); + var event_loop = this.event_loop; + event_loop.enter(); + defer event_loop.exit(); + + this.deinit(); + promise.resolve(globalThis, JSC.JSValue.jsNumberFromUint64(written)); + } + + fn truncate(this: *CopyFileWindows) void { + // TODO: optimize this + @branchHint(.cold); + + var node_fs: JSC.Node.fs.NodeFS = .{}; + _ = node_fs.truncate( + .{ + .path = this.destination_file_store.data.file.pathlike, + .len = @intCast(this.size), + }, + .sync, + ); + } + + pub fn deinit(this: *CopyFileWindows) void { + this.read_write_loop.close(); + this.destination_file_store.deref(); + this.source_file_store.deref(); + this.promise.deinit(); + this.io_request.deinit(); + bun.destroy(this); + } + + fn mkdirp( + this: *CopyFileWindows, + ) void { + bun.sys.syslog("mkdirp", .{}); + this.mkdirp_if_not_exists = false; + var destination = &this.destination_file_store.data.file; + if (destination.pathlike != .path) { + this.throw(.{ + .errno = @as(c_int, @intCast(@intFromEnum(bun.sys.SystemErrno.EINVAL))), + .syscall = .mkdir, + }); + return; + } + + this.event_loop.refConcurrently(); + JSC.Node.fs.Async.AsyncMkdirp.new(.{ + .completion = @ptrCast(&onMkdirpCompleteConcurrent), + .completion_ctx = this, + .path 
= bun.Dirname.dirname(u8, destination.pathlike.path.slice()) + // this shouldn't happen + orelse destination.pathlike.path.slice(), + }).schedule(); + } + + fn onMkdirpComplete(this: *CopyFileWindows) void { + this.event_loop.unrefConcurrently(); + + if (this.err) |err| { + this.throw(err); + bun.default_allocator.free(err.path); + return; + } + + this.copyfile(); + } + + fn onMkdirpCompleteConcurrent(this: *CopyFileWindows, err_: JSC.Maybe(void)) void { + bun.sys.syslog("mkdirp complete", .{}); + assert(this.err == null); + this.err = if (err_ == .err) err_.err else null; + this.event_loop.enqueueTaskConcurrent(JSC.ConcurrentTask.create(JSC.ManagedTask.New(CopyFileWindows, onMkdirpComplete).init(this))); + } +}; + +pub const IOWhich = enum { + source, + destination, + both, +}; + +const unsupported_directory_error = SystemError{ + .errno = @as(c_int, @intCast(@intFromEnum(bun.sys.SystemErrno.EISDIR))), + .message = bun.String.static("That doesn't work on folders"), + .syscall = bun.String.static("fstat"), +}; +const unsupported_non_regular_file_error = SystemError{ + .errno = @as(c_int, @intCast(@intFromEnum(bun.sys.SystemErrno.ENOTSUP))), + .message = bun.String.static("Non-regular files aren't supported yet"), + .syscall = bun.String.static("fstat"), +}; + +pub const CopyFilePromiseTask = JSC.ConcurrentPromiseTask(CopyFile); +pub const CopyFilePromiseTaskEventLoopTask = CopyFilePromiseTask.EventLoopTask; + +const std = @import("std"); +const bun = @import("bun"); +const Environment = bun.Environment; +const assert = bun.assert; +const libuv = bun.windows.libuv; + +const webcore = bun.webcore; +const Blob = webcore.Blob; +const Store = Blob.Store; +const SizeType = Blob.SizeType; + +const JSC = bun.jsc; +const SystemError = JSC.SystemError; +const JSGlobalObject = JSC.JSGlobalObject; +const JSValue = JSC.JSValue; diff --git a/src/bun.js/webcore/blob/ReadFile.zig b/src/bun.js/webcore/blob/read_file.zig similarity index 98% rename from src/bun.js/webcore/blob/ReadFile.zig rename to src/bun.js/webcore/blob/read_file.zig index ef8699b0c3..35180fd8f1 100644 --- a/src/bun.js/webcore/blob/ReadFile.zig +++ b/src/bun.js/webcore/blob/read_file.zig @@ -1,14 +1,14 @@ const bun = @import("bun"); const JSC = bun.JSC; const std = @import("std"); -const Blob = bun.JSC.WebCore.Blob; +const Blob = bun.webcore.Blob; const invalid_fd = bun.invalid_fd; const SystemError = JSC.SystemError; const SizeType = Blob.SizeType; const io = bun.io; -const FileOpener = Store.FileOpener; -const FileCloser = Store.FileCloser; +const FileOpener = Blob.FileOpener; +const FileCloser = Blob.FileCloser; const Environment = bun.Environment; const bloblog = bun.Output.scoped(.WriteFile, true); const JSPromise = JSC.JSPromise; @@ -51,8 +51,8 @@ pub fn NewReadFileHandler(comptime Function: anytype) type { }; } -const FileStore = Blob.FileStore; -const ByteStore = Blob.ByteStore; +const FileStore = Blob.Store.File; +const ByteStore = Blob.Store.Bytes; const Store = Blob.Store; const ClosingState = Blob.ClosingState; @@ -708,8 +708,8 @@ pub const ReadFileUV = struct { } // Out of memory we can't read more than 4GB at a time (ULONG) on Windows if (this.size > @as(usize, std.math.maxInt(bun.windows.ULONG))) { - this.errno = bun.errnoToZigErr(bun.C.E.NOMEM); - this.system_error = bun.sys.Error.fromCode(bun.C.E.NOMEM, .read).toSystemError(); + this.errno = bun.errnoToZigErr(bun.sys.E.NOMEM); + this.system_error = bun.sys.Error.fromCode(bun.sys.E.NOMEM, .read).toSystemError(); this.onFinish(); return; } diff --git 
a/src/bun.js/webcore/blob/WriteFile.zig b/src/bun.js/webcore/blob/write_file.zig similarity index 98% rename from src/bun.js/webcore/blob/WriteFile.zig rename to src/bun.js/webcore/blob/write_file.zig index ea6667245e..fd17922d7c 100644 --- a/src/bun.js/webcore/blob/WriteFile.zig +++ b/src/bun.js/webcore/blob/write_file.zig @@ -1,22 +1,3 @@ -const bun = @import("bun"); -const JSC = bun.JSC; -const std = @import("std"); -const Blob = JSC.WebCore.Blob; -const invalid_fd = bun.invalid_fd; - -const SystemError = JSC.SystemError; -const SizeType = Blob.SizeType; -const io = bun.io; -const FileOpener = Blob.Store.FileOpener; -const FileCloser = Blob.Store.FileCloser; -const Environment = bun.Environment; -const bloblog = bun.Output.scoped(.WriteFile, true); -const JSPromise = JSC.JSPromise; -const JSGlobalObject = JSC.JSGlobalObject; -const ZigString = JSC.ZigString; - -const ClosingState = Blob.ClosingState; - pub const WriteFileResultType = SystemError.Maybe(SizeType); pub const WriteFileOnWriteFileCallback = *const fn (ctx: *anyopaque, count: WriteFileResultType) void; pub const WriteFileTask = JSC.WorkTask(WriteFile); @@ -288,7 +269,11 @@ pub const WriteFile = struct { // seemed to have zero performance impact in // microbenchmarks. if (!this.could_block and this.bytes_blob.sharedView().len > 1024) { - bun.C.preallocate_file(fd.cast(), 0, @intCast(this.bytes_blob.sharedView().len)) catch {}; // we don't care if it fails. + bun.sys.preallocate_file( + fd.cast(), + 0, + @intCast(this.bytes_blob.sharedView().len), + ) catch {}; // we don't care if it fails. } } @@ -434,7 +419,7 @@ pub const WriteFileWindows = struct { &this.io_request, &(std.posix.toPosixPath(path) catch { this.throw(bun.sys.Error{ - .errno = @intFromEnum(bun.C.E.NAMETOOLONG), + .errno = @intFromEnum(bun.sys.E.NAMETOOLONG), .syscall = .open, }); return; @@ -495,7 +480,7 @@ pub const WriteFileWindows = struct { this.event_loop.refConcurrently(); const path = this.file_blob.store.?.data.file.pathlike.path.slice(); - JSC.Node.Async.AsyncMkdirp.new(.{ + JSC.Node.fs.Async.AsyncMkdirp.new(.{ .completion = @ptrCast(&onMkdirpCompleteConcurrent), .completion_ctx = this, .path = bun.Dirname.dirname(u8, path) @@ -671,7 +656,6 @@ pub const WriteFilePromise = struct { } }; -const Body = JSC.WebCore.Body; pub const WriteFileWaitFromLockedValueTask = struct { file_blob: Blob, globalThis: *JSGlobalObject, @@ -731,3 +715,24 @@ pub const WriteFileWaitFromLockedValueTask = struct { } } }; + +const Body = JSC.WebCore.Body; + +const bun = @import("bun"); +const JSC = bun.JSC; +const std = @import("std"); +const Blob = JSC.WebCore.Blob; +const invalid_fd = bun.invalid_fd; + +const SystemError = JSC.SystemError; +const SizeType = Blob.SizeType; +const io = bun.io; +const FileOpener = Blob.FileOpener; +const FileCloser = Blob.FileCloser; +const Environment = bun.Environment; +const bloblog = bun.Output.scoped(.WriteFile, true); +const JSPromise = JSC.JSPromise; +const JSGlobalObject = JSC.JSGlobalObject; +const ZigString = JSC.ZigString; + +const ClosingState = Blob.ClosingState; diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig deleted file mode 100644 index cb09d8c641..0000000000 --- a/src/bun.js/webcore/body.zig +++ /dev/null @@ -1,1735 +0,0 @@ -const std = @import("std"); -const Api = @import("../../api/schema.zig").Api; -const bun = @import("bun"); -const MimeType = bun.http.MimeType; -const ZigURL = @import("../../url.zig").URL; -const HTTPClient = bun.http; -const JSC = bun.JSC; - -const Method = 
@import("../../http/method.zig").Method; -const FetchHeaders = JSC.FetchHeaders; -const ObjectPool = @import("../../pool.zig").ObjectPool; -const SystemError = JSC.SystemError; -const Output = bun.Output; -const MutableString = bun.MutableString; -const strings = bun.strings; -const string = bun.string; -const default_allocator = bun.default_allocator; -const FeatureFlags = bun.FeatureFlags; -const ArrayBuffer = @import("../base.zig").ArrayBuffer; -const Properties = @import("../base.zig").Properties; - -const castObj = @import("../base.zig").castObj; -const getAllocator = @import("../base.zig").getAllocator; - -const Environment = @import("../../env.zig"); -const ZigString = JSC.ZigString; -const IdentityContext = @import("../../identity_context.zig").IdentityContext; -const JSPromise = JSC.JSPromise; -const JSValue = JSC.JSValue; -const JSGlobalObject = JSC.JSGlobalObject; -const NullableAllocator = bun.NullableAllocator; - -const VirtualMachine = JSC.VirtualMachine; -const Task = JSC.Task; -const JSPrinter = bun.js_printer; -const picohttp = bun.picohttp; -const StringJoiner = bun.StringJoiner; -const uws = bun.uws; - -const Blob = JSC.WebCore.Blob; -// const InlineBlob = JSC.WebCore.InlineBlob; -const AnyBlob = JSC.WebCore.AnyBlob; -const InternalBlob = JSC.WebCore.InternalBlob; -const Response = JSC.WebCore.Response; -const Request = JSC.WebCore.Request; - -// https://developer.mozilla.org/en-US/docs/Web/API/Body -pub const Body = struct { - value: Value, // = Value.empty, - - pub inline fn len(this: *const Body) Blob.SizeType { - return this.value.size(); - } - - pub fn slice(this: *const Body) []const u8 { - return this.value.slice(); - } - - pub fn use(this: *Body) Blob { - return this.value.use(); - } - - pub fn clone(this: *Body, globalThis: *JSGlobalObject) Body { - return Body{ - .value = this.value.clone(globalThis), - }; - } - - pub fn writeFormat(this: *Body, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { - const Writer = @TypeOf(writer); - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("bodyUsed: ", enable_ansi_colors)); - try formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.value == .Used), .BooleanObject, enable_ansi_colors); - - if (this.value == .Blob) { - try formatter.printComma(Writer, writer, enable_ansi_colors); - try writer.writeAll("\n"); - try formatter.writeIndent(Writer, writer); - try this.value.Blob.writeFormat(Formatter, formatter, writer, enable_ansi_colors); - } else if (this.value == .InternalBlob or this.value == .WTFStringImpl) { - try formatter.printComma(Writer, writer, enable_ansi_colors); - try writer.writeAll("\n"); - try formatter.writeIndent(Writer, writer); - try Blob.writeFormatForSize(false, this.value.size(), writer, enable_ansi_colors); - } else if (this.value == .Locked) { - if (this.value.Locked.readable.get(this.value.Locked.global)) |stream| { - try formatter.printComma(Writer, writer, enable_ansi_colors); - try writer.writeAll("\n"); - try formatter.writeIndent(Writer, writer); - try formatter.printAs(.Object, Writer, writer, stream.value, stream.value.jsType(), enable_ansi_colors); - } - } - } - - pub fn deinit(this: *Body, _: std.mem.Allocator) void { - this.value.deinit(); - } - - pub const PendingValue = struct { - promise: ?JSValue = null, - readable: JSC.WebCore.ReadableStream.Strong = .{}, - // writable: JSC.WebCore.Sink - - global: *JSGlobalObject, - task: ?*anyopaque = null, - - /// runs after the 
data is available. - onReceiveValue: ?*const fn (ctx: *anyopaque, value: *Value) void = null, - - /// conditionally runs when requesting data - /// used in HTTP server to ignore request bodies unless asked for it - onStartBuffering: ?*const fn (ctx: *anyopaque) void = null, - onStartStreaming: ?*const fn (ctx: *anyopaque) JSC.WebCore.DrainResult = null, - onReadableStreamAvailable: ?*const fn (ctx: *anyopaque, globalThis: *JSC.JSGlobalObject, readable: JSC.WebCore.ReadableStream) void = null, - size_hint: Blob.SizeType = 0, - - deinit: bool = false, - action: Action = Action{ .none = {} }, - - /// For Http Client requests - /// when Content-Length is provided this represents the whole size of the request - /// If chunked encoded this will represent the total received size (ignoring the chunk headers) - /// If the size is unknown will be 0 - fn sizeHint(this: *const PendingValue) Blob.SizeType { - if (this.readable.get(this.global)) |readable| { - if (readable.ptr == .Bytes) { - return readable.ptr.Bytes.size_hint; - } - } - return this.size_hint; - } - - pub fn toAnyBlob(this: *PendingValue) ?AnyBlob { - if (this.promise != null) - return null; - - return this.toAnyBlobAllowPromise(); - } - - pub fn isDisturbed(this: *const PendingValue, comptime T: type, globalObject: *JSC.JSGlobalObject, this_value: JSC.JSValue) bool { - if (this.promise != null) { - return true; - } - - if (T.js.bodyGetCached(this_value)) |body_value| { - if (JSC.WebCore.ReadableStream.isDisturbedValue(body_value, globalObject)) { - return true; - } - - return false; - } - - if (this.readable.get(globalObject)) |readable| { - return readable.isDisturbed(globalObject); - } - - return false; - } - - pub fn isDisturbed2(this: *const PendingValue, globalObject: *JSC.JSGlobalObject) bool { - if (this.promise != null) { - return true; - } - - if (this.readable.get(globalObject)) |readable| { - return readable.isDisturbed(globalObject); - } - - return false; - } - pub fn isStreamingOrBuffering(this: *PendingValue) bool { - return this.readable.held.has() or (this.promise != null and !this.promise.?.isEmptyOrUndefinedOrNull()); - } - - pub fn hasPendingPromise(this: *PendingValue) bool { - const promise = this.promise orelse return false; - - if (promise.asAnyPromise()) |internal| { - if (internal.status(this.global.vm()) != .pending) { - promise.unprotect(); - this.promise = null; - return false; - } - - return true; - } - - this.promise = null; - return false; - } - - pub fn toAnyBlobAllowPromise(this: *PendingValue) ?AnyBlob { - var stream = if (this.readable.get(this.global)) |readable| readable else return null; - - if (stream.toAnyBlob(this.global)) |blob| { - this.readable.deinit(); - return blob; - } - - return null; - } - - pub fn setPromise(value: *PendingValue, globalThis: *JSC.JSGlobalObject, action: Action) JSValue { - value.action = action; - if (value.readable.get(globalThis)) |readable| { - switch (action) { - .getFormData, .getText, .getJSON, .getBlob, .getArrayBuffer, .getBytes => { - const promise = switch (action) { - .getJSON => globalThis.readableStreamToJSON(readable.value), - .getArrayBuffer => globalThis.readableStreamToArrayBuffer(readable.value), - .getBytes => globalThis.readableStreamToBytes(readable.value), - .getText => globalThis.readableStreamToText(readable.value), - .getBlob => globalThis.readableStreamToBlob(readable.value), - .getFormData => |form_data| brk: { - defer { - form_data.?.deinit(); - value.action.getFormData = null; - } - - break :brk 
globalThis.readableStreamToFormData(readable.value, switch (form_data.?.encoding) { - .Multipart => |multipart| bun.String.init(multipart).toJS(globalThis), - .URLEncoded => .undefined, - }); - }, - else => unreachable, - }; - value.readable.deinit(); - // The ReadableStream within is expected to keep this Promise alive. - // If you try to protect() this, it will leak memory because the other end of the ReadableStream won't call it. - // See https://github.com/oven-sh/bun/issues/13678 - return promise; - }, - - .none => {}, - } - } - - { - var promise = JSC.JSPromise.create(globalThis); - const promise_value = promise.asValue(globalThis); - value.promise = promise_value; - promise_value.protect(); - - if (value.onStartBuffering) |onStartBuffering| { - value.onStartBuffering = null; - onStartBuffering(value.task.?); - } - return promise_value; - } - } - - pub const Action = union(enum) { - none: void, - getText: void, - getJSON: void, - getArrayBuffer: void, - getBytes: void, - getBlob: void, - getFormData: ?*bun.FormData.AsyncFormData, - }; - }; - - /// This is a duplex stream! - pub const Value = union(Tag) { - const log = Output.scoped(.BodyValue, false); - Blob: Blob, - - /// This is the String type from WebKit - /// It is reference counted, so we must always deref it (which this does automatically) - /// Be careful where it can directly be used. - /// - /// If it is a latin1 string with only ascii, we can use it directly. - /// Otherwise, we must convert it to utf8. - /// - /// Unless we are sending it directly to JavaScript, for example: - /// - /// var str = "hello world 🤭" - /// var response = new Response(str); - /// /* Body.Value stays WTFStringImpl */ - /// var body = await response.text(); - /// - /// In this case, even though there's an emoji, we can use the StringImpl directly. - /// BUT, if we were instead using it in the HTTP server, this cannot be used directly. - /// - /// When the server calls .toBlobIfPossible(), we will automatically - /// convert this Value to an InternalBlob - /// - /// Example code: - /// - /// Bun.serve({ - /// fetch(req) { - /// /* Body.Value becomes InternalBlob */ - /// return new Response("hello world 🤭"); - /// } - /// }) - /// - /// This works for .json(), too. - WTFStringImpl: bun.WTF.StringImpl, - /// Single-use Blob - /// Avoids a heap allocation. - InternalBlob: InternalBlob, - /// Single-use Blob that stores the bytes in the Value itself. - // InlineBlob: InlineBlob, - Locked: PendingValue, - Used, - Empty, - Error: ValueError, - Null, - - // We may not have all the data yet - // So we can't know for sure if it's empty or not - // We CAN know that it is definitely empty. 
- pub fn isDefinitelyEmpty(this: *const Value) bool { - return switch (this.*) { - .Null => true, - .Used, .Empty => true, - .InternalBlob => this.InternalBlob.slice().len == 0, - .Blob => this.Blob.size == 0, - .WTFStringImpl => this.WTFStringImpl.length() == 0, - .Error, .Locked => false, - }; - } - - pub const heap_breakdown_label = "BodyValue"; - pub const ValueError = union(enum) { - AbortReason: JSC.CommonAbortReason, - SystemError: JSC.SystemError, - Message: bun.String, - JSValue: JSC.Strong, - - pub fn toStreamError(this: *@This(), globalObject: *JSC.JSGlobalObject) JSC.WebCore.StreamResult.StreamError { - return switch (this.*) { - .AbortReason => .{ - .AbortReason = this.AbortReason, - }, - else => .{ - .JSValue = this.toJS(globalObject), - }, - }; - } - - pub fn toJS(this: *@This(), globalObject: *JSC.JSGlobalObject) JSC.JSValue { - const js_value = switch (this.*) { - .AbortReason => |reason| reason.toJS(globalObject), - .SystemError => |system_error| system_error.toErrorInstance(globalObject), - .Message => |message| message.toErrorInstance(globalObject), - // do a early return in this case we don't need to create a new Strong - .JSValue => |js_value| return js_value.get() orelse JSC.JSValue.jsUndefined(), - }; - this.* = .{ .JSValue = JSC.Strong.create(js_value, globalObject) }; - return js_value; - } - - pub fn dupe(this: *const @This(), globalObject: *JSC.JSGlobalObject) @This() { - var value = this.*; - switch (this.*) { - .SystemError => value.SystemError.ref(), - .Message => value.Message.ref(), - .JSValue => |js_ref| { - if (js_ref.get()) |js_value| { - return .{ .JSValue = JSC.Strong.create(js_value, globalObject) }; - } - return .{ .JSValue = .empty }; - }, - .AbortReason => {}, - } - return value; - } - - pub fn deinit(this: *@This()) void { - switch (this.*) { - .SystemError => |system_error| system_error.deref(), - .Message => |message| message.deref(), - .JSValue => this.JSValue.deinit(), - .AbortReason => {}, - } - // safe empty value after deinit - this.* = .{ .JSValue = .empty }; - } - }; - pub fn toBlobIfPossible(this: *Value) void { - if (this.* == .WTFStringImpl) { - if (this.WTFStringImpl.toUTF8IfNeeded(bun.default_allocator)) |bytes| { - var str = this.WTFStringImpl; - defer str.deref(); - this.* = .{ - .InternalBlob = InternalBlob{ - .bytes = std.ArrayList(u8).fromOwnedSlice(bun.default_allocator, @constCast(bytes.slice())), - .was_string = true, - }, - }; - } - } - - if (this.* != .Locked) - return; - - if (this.Locked.toAnyBlob()) |blob| { - this.* = switch (blob) { - .Blob => .{ .Blob = blob.Blob }, - .InternalBlob => .{ .InternalBlob = blob.InternalBlob }, - .WTFStringImpl => .{ .WTFStringImpl = blob.WTFStringImpl }, - // .InlineBlob => .{ .InlineBlob = blob.InlineBlob }, - }; - } - } - - pub fn size(this: *const Value) Blob.SizeType { - return switch (this.*) { - .Blob => this.Blob.size, - .InternalBlob => @as(Blob.SizeType, @truncate(this.InternalBlob.sliceConst().len)), - .WTFStringImpl => @as(Blob.SizeType, @truncate(this.WTFStringImpl.utf8ByteLength())), - .Locked => this.Locked.sizeHint(), - // .InlineBlob => @truncate(Blob.SizeType, this.InlineBlob.sliceConst().len), - else => 0, - }; - } - - pub fn fastSize(this: *const Value) Blob.SizeType { - return switch (this.*) { - .InternalBlob => @as(Blob.SizeType, @truncate(this.InternalBlob.sliceConst().len)), - .WTFStringImpl => @as(Blob.SizeType, @truncate(this.WTFStringImpl.byteSlice().len)), - .Locked => this.Locked.sizeHint(), - // .InlineBlob => @truncate(Blob.SizeType, 
this.InlineBlob.sliceConst().len), - else => 0, - }; - } - - pub fn memoryCost(this: *const Value) usize { - return switch (this.*) { - .InternalBlob => this.InternalBlob.bytes.items.len, - .WTFStringImpl => this.WTFStringImpl.memoryCost(), - .Locked => this.Locked.sizeHint(), - // .InlineBlob => this.InlineBlob.sliceConst().len, - else => 0, - }; - } - - pub fn estimatedSize(this: *const Value) usize { - return switch (this.*) { - .InternalBlob => this.InternalBlob.sliceConst().len, - .WTFStringImpl => this.WTFStringImpl.byteSlice().len, - .Locked => this.Locked.sizeHint(), - // .InlineBlob => this.InlineBlob.sliceConst().len, - else => 0, - }; - } - - pub fn createBlobValue(data: []u8, allocator: std.mem.Allocator, was_string: bool) Value { - // if (data.len <= InlineBlob.available_bytes) { - // var _blob = InlineBlob{ - // .bytes = undefined, - // .was_string = was_string, - // .len = @truncate(InlineBlob.IntSize, data.len), - // }; - // @memcpy(&_blob.bytes, data.ptr, data.len); - // allocator.free(data); - // return Value{ - // .InlineBlob = _blob, - // }; - // } - - return Value{ - .InternalBlob = InternalBlob{ - .bytes = std.ArrayList(u8).fromOwnedSlice(allocator, data), - .was_string = was_string, - }, - }; - } - - pub const Tag = enum { - Blob, - WTFStringImpl, - InternalBlob, - // InlineBlob, - Locked, - Used, - Empty, - Error, - Null, - }; - - // pub const empty = Value{ .Empty = {} }; - - pub fn toReadableStream(this: *Value, globalThis: *JSGlobalObject) JSValue { - JSC.markBinding(@src()); - - switch (this.*) { - .Used => { - return JSC.WebCore.ReadableStream.used(globalThis); - }, - .Empty => { - return JSC.WebCore.ReadableStream.empty(globalThis); - }, - .Null => { - return JSValue.null; - }, - .InternalBlob, .Blob, .WTFStringImpl => { - var blob = this.use(); - defer blob.detach(); - blob.resolveSize(); - const value = JSC.WebCore.ReadableStream.fromBlob(globalThis, &blob, blob.size); - - this.* = .{ - .Locked = .{ - .readable = JSC.WebCore.ReadableStream.Strong.init(JSC.WebCore.ReadableStream.fromJS(value, globalThis).?, globalThis), - .global = globalThis, - }, - }; - return value; - }, - .Locked => { - var locked = &this.Locked; - if (locked.readable.get(globalThis)) |readable| { - return readable.value; - } - if (locked.promise != null or locked.action != .none) { - return JSC.WebCore.ReadableStream.used(globalThis); - } - var drain_result: JSC.WebCore.DrainResult = .{ - .estimated_size = 0, - }; - - if (locked.onStartStreaming) |drain| { - locked.onStartStreaming = null; - drain_result = drain(locked.task.?); - } - - if (drain_result == .empty or drain_result == .aborted) { - this.* = .{ .Null = {} }; - return JSC.WebCore.ReadableStream.empty(globalThis); - } - - var reader = JSC.WebCore.ByteStream.Source.new(.{ - .context = undefined, - .globalThis = globalThis, - }); - - reader.context.setup(); - - if (drain_result == .estimated_size) { - reader.context.highWaterMark = @as(Blob.SizeType, @truncate(drain_result.estimated_size)); - reader.context.size_hint = @as(Blob.SizeType, @truncate(drain_result.estimated_size)); - } else if (drain_result == .owned) { - reader.context.buffer = drain_result.owned.list; - reader.context.size_hint = @as(Blob.SizeType, @truncate(drain_result.owned.size_hint)); - } - - locked.readable = JSC.WebCore.ReadableStream.Strong.init(.{ - .ptr = .{ .Bytes = &reader.context }, - .value = reader.toReadableStream(globalThis), - }, globalThis); - - if (locked.onReadableStreamAvailable) |onReadableStreamAvailable| { - 
onReadableStreamAvailable(locked.task.?, globalThis, locked.readable.get(globalThis).?); - } - - return locked.readable.get(globalThis).?.value; - }, - .Error => { - // TODO: handle error properly - return JSC.WebCore.ReadableStream.empty(globalThis); - }, - } - } - - pub fn fromJS(globalThis: *JSGlobalObject, value: JSValue) bun.JSError!Value { - value.ensureStillAlive(); - - if (value.isEmptyOrUndefinedOrNull()) { - return Body.Value{ - .Null = {}, - }; - } - - const js_type = value.jsType(); - - if (js_type.isStringLike()) { - var str = try value.toBunString(globalThis); - if (str.length() == 0) { - return Body.Value{ - .Empty = {}, - }; - } - - assert(str.tag == .WTFStringImpl); - - return Body.Value{ - .WTFStringImpl = str.value.WTFStringImpl, - }; - } - - if (js_type.isTypedArrayOrArrayBuffer()) { - if (value.asArrayBuffer(globalThis)) |buffer| { - const bytes = buffer.byteSlice(); - - if (bytes.len == 0) { - return Body.Value{ - .Empty = {}, - }; - } - - // if (bytes.len <= InlineBlob.available_bytes) { - // return Body.Value{ - // .InlineBlob = InlineBlob.init(bytes), - // }; - // } - - return Body.Value{ - .InternalBlob = .{ - .bytes = std.ArrayList(u8){ - .items = bun.default_allocator.dupe(u8, bytes) catch { - return globalThis.throwValue(ZigString.static("Failed to clone ArrayBufferView").toErrorInstance(globalThis)); - }, - .capacity = bytes.len, - .allocator = bun.default_allocator, - }, - .was_string = false, - }, - }; - } - } - - if (value.as(JSC.DOMFormData)) |form_data| { - return Body.Value{ - .Blob = Blob.fromDOMFormData(globalThis, bun.default_allocator, form_data), - }; - } - - if (value.as(JSC.URLSearchParams)) |search_params| { - return Body.Value{ - .Blob = Blob.fromURLSearchParams(globalThis, bun.default_allocator, search_params), - }; - } - - if (js_type == .DOMWrapper) { - if (value.as(Blob)) |blob| { - return Body.Value{ - .Blob = blob.dupe(), - }; - } - } - - value.ensureStillAlive(); - - if (JSC.WebCore.ReadableStream.fromJS(value, globalThis)) |readable| { - if (readable.isDisturbed(globalThis)) { - return globalThis.throw("ReadableStream has already been used", .{}); - } - - switch (readable.ptr) { - .Blob => |blob| { - const store = blob.detachStore() orelse { - return Body.Value{ .Blob = Blob.initEmpty(globalThis) }; - }; - - readable.forceDetach(globalThis); - - const result: Value = .{ - .Blob = Blob.initWithStore(store, globalThis), - }; - - return result; - }, - else => {}, - } - - return Body.Value.fromReadableStreamWithoutLockCheck(readable, globalThis); - } - - return Body.Value{ - .Blob = Blob.get(globalThis, value, true, false) catch |err| { - if (!globalThis.hasException()) { - if (err == error.InvalidArguments) { - return globalThis.throwInvalidArguments("Expected an Array", .{}); - } - - return globalThis.throwInvalidArguments("Invalid Body object", .{}); - } - - return error.JSError; - }, - }; - } - - pub fn fromReadableStreamWithoutLockCheck(readable: JSC.WebCore.ReadableStream, globalThis: *JSGlobalObject) Value { - return .{ - .Locked = .{ - .readable = JSC.WebCore.ReadableStream.Strong.init(readable, globalThis), - .global = globalThis, - }, - }; - } - - pub fn resolve( - to_resolve: *Value, - new: *Value, - global: *JSGlobalObject, - headers: ?*FetchHeaders, - ) void { - log("resolve", .{}); - if (to_resolve.* == .Locked) { - var locked = &to_resolve.Locked; - - if (locked.readable.get(global)) |readable| { - readable.done(global); - locked.readable.deinit(); - } - - if (locked.onReceiveValue) |callback| { - locked.onReceiveValue = null; 
- callback(locked.task.?, new); - return; - } - - if (locked.promise) |promise_| { - const promise = promise_.asAnyPromise().?; - locked.promise = null; - - switch (locked.action) { - // These ones must use promise.wrap() to handle exceptions thrown while calling .toJS() on the value. - // These exceptions can happen if the String is too long, ArrayBuffer is too large, JSON parse error, etc. - .getText => { - switch (new.*) { - .WTFStringImpl, - .InternalBlob, - // .InlineBlob, - => { - var blob = new.useAsAnyBlobAllowNonUTF8String(); - promise.wrap(global, AnyBlob.toStringTransfer, .{ &blob, global }); - }, - else => { - var blob = new.use(); - promise.wrap(global, Blob.toStringTransfer, .{ &blob, global }); - }, - } - }, - .getJSON => { - var blob = new.useAsAnyBlobAllowNonUTF8String(); - promise.wrap(global, AnyBlob.toJSONShare, .{ &blob, global }); - blob.detach(); - }, - .getArrayBuffer => { - var blob = new.useAsAnyBlobAllowNonUTF8String(); - promise.wrap(global, AnyBlob.toArrayBufferTransfer, .{ &blob, global }); - }, - .getBytes => { - var blob = new.useAsAnyBlobAllowNonUTF8String(); - promise.wrap(global, AnyBlob.toUint8ArrayTransfer, .{ &blob, global }); - }, - - .getFormData => inner: { - var blob = new.useAsAnyBlob(); - defer blob.detach(); - var async_form_data: *bun.FormData.AsyncFormData = locked.action.getFormData orelse { - promise.reject(global, ZigString.init("Internal error: task for FormData must not be null").toErrorInstance(global)); - break :inner; - }; - defer async_form_data.deinit(); - async_form_data.toJS(global, blob.slice(), promise); - }, - .none, .getBlob => { - var blob = Blob.new(new.use()); - blob.allocator = bun.default_allocator; - if (headers) |fetch_headers| { - if (fetch_headers.fastGet(.ContentType)) |content_type| { - var content_slice = content_type.toSlice(bun.default_allocator); - defer content_slice.deinit(); - var allocated = false; - const mimeType = MimeType.init(content_slice.slice(), bun.default_allocator, &allocated); - blob.content_type = mimeType.value; - blob.content_type_allocated = allocated; - blob.content_type_was_set = true; - if (blob.store != null) { - blob.store.?.mime_type = mimeType; - } - } - } - if (!blob.content_type_was_set and blob.store != null) { - blob.content_type = MimeType.text.value; - blob.content_type_allocated = false; - blob.content_type_was_set = true; - blob.store.?.mime_type = MimeType.text; - } - promise.resolve(global, blob.toJS(global)); - }, - } - JSC.C.JSValueUnprotect(global, promise_.asObjectRef()); - } - } - } - pub fn slice(this: *const Value) []const u8 { - return switch (this.*) { - .Blob => this.Blob.sharedView(), - .InternalBlob => this.InternalBlob.sliceConst(), - .WTFStringImpl => if (this.WTFStringImpl.canUseAsUTF8()) this.WTFStringImpl.latin1Slice() else "", - // .InlineBlob => this.InlineBlob.sliceConst(), - else => "", - }; - } - - pub fn use(this: *Value) Blob { - this.toBlobIfPossible(); - - switch (this.*) { - .Blob => { - const new_blob = this.Blob; - assert(new_blob.allocator == null); // owned by Body - this.* = .{ .Used = {} }; - return new_blob; - }, - .InternalBlob => { - const new_blob = Blob.init( - this.InternalBlob.toOwnedSlice(), - // we will never resize it from here - // we have to use the default allocator - // even if it was actually allocated on a different thread - bun.default_allocator, - JSC.VirtualMachine.get().global, - ); - - this.* = .{ .Used = {} }; - return new_blob; - }, - .WTFStringImpl => { - var new_blob: Blob = undefined; - var wtf = this.WTFStringImpl; - 
defer wtf.deref(); - if (wtf.toUTF8IfNeeded(bun.default_allocator)) |allocated_slice| { - new_blob = Blob.init( - @constCast(allocated_slice.slice()), - bun.default_allocator, - JSC.VirtualMachine.get().global, - ); - } else { - new_blob = Blob.init( - bun.default_allocator.dupe(u8, wtf.latin1Slice()) catch bun.outOfMemory(), - bun.default_allocator, - JSC.VirtualMachine.get().global, - ); - } - - this.* = .{ .Used = {} }; - return new_blob; - }, - // .InlineBlob => { - // const cloned = this.InlineBlob.bytes; - // // keep same behavior as InternalBlob but clone the data - // const new_blob = Blob.create( - // cloned[0..this.InlineBlob.len], - // bun.default_allocator, - // JSC.VirtualMachine.get().global, - // false, - // ); - - // this.* = .{ .Used = {} }; - // return new_blob; - // }, - else => { - return Blob.initEmpty(undefined); - }, - } - } - - pub fn tryUseAsAnyBlob(this: *Value) ?AnyBlob { - if (this.* == .WTFStringImpl) { - if (this.WTFStringImpl.canUseAsUTF8()) { - return AnyBlob{ .WTFStringImpl = this.WTFStringImpl }; - } - } - - const any_blob: AnyBlob = switch (this.*) { - .Blob => AnyBlob{ .Blob = this.Blob }, - .InternalBlob => AnyBlob{ .InternalBlob = this.InternalBlob }, - // .InlineBlob => AnyBlob{ .InlineBlob = this.InlineBlob }, - .Locked => this.Locked.toAnyBlobAllowPromise() orelse return null, - else => return null, - }; - - this.* = .{ .Used = {} }; - return any_blob; - } - - pub fn useAsAnyBlob(this: *Value) AnyBlob { - const any_blob: AnyBlob = switch (this.*) { - .Blob => .{ .Blob = this.Blob }, - .InternalBlob => .{ .InternalBlob = this.InternalBlob }, - .WTFStringImpl => |str| brk: { - if (str.toUTF8IfNeeded(bun.default_allocator)) |utf8| { - defer str.deref(); - break :brk .{ - .InternalBlob = InternalBlob{ - .bytes = std.ArrayList(u8).fromOwnedSlice(bun.default_allocator, @constCast(utf8.slice())), - .was_string = true, - }, - }; - } else { - break :brk .{ - .WTFStringImpl = str, - }; - } - }, - // .InlineBlob => .{ .InlineBlob = this.InlineBlob }, - .Locked => this.Locked.toAnyBlobAllowPromise() orelse AnyBlob{ .Blob = .{} }, - else => .{ .Blob = Blob.initEmpty(undefined) }, - }; - - this.* = if (this.* == .Null) - .{ .Null = {} } - else - .{ .Used = {} }; - return any_blob; - } - - pub fn useAsAnyBlobAllowNonUTF8String(this: *Value) AnyBlob { - const any_blob: AnyBlob = switch (this.*) { - .Blob => .{ .Blob = this.Blob }, - .InternalBlob => .{ .InternalBlob = this.InternalBlob }, - .WTFStringImpl => .{ .WTFStringImpl = this.WTFStringImpl }, - // .InlineBlob => .{ .InlineBlob = this.InlineBlob }, - .Locked => this.Locked.toAnyBlobAllowPromise() orelse AnyBlob{ .Blob = .{} }, - else => .{ .Blob = Blob.initEmpty(undefined) }, - }; - - this.* = if (this.* == .Null) - .{ .Null = {} } - else - .{ .Used = {} }; - return any_blob; - } - - pub fn toErrorInstance(this: *Value, err: ValueError, global: *JSGlobalObject) void { - if (this.* == .Locked) { - var locked = this.Locked; - this.* = .{ .Error = err }; - - var strong_readable = locked.readable; - locked.readable = .{}; - defer strong_readable.deinit(); - - if (locked.hasPendingPromise()) { - const promise = locked.promise.?; - defer promise.unprotect(); - locked.promise = null; - - if (promise.asAnyPromise()) |internal| { - internal.reject(global, this.Error.toJS(global)); - } - } - - // The Promise version goes before the ReadableStream version incase the Promise version is used too. - // Avoid creating unnecessary duplicate JSValue. 
- if (strong_readable.get(global)) |readable| { - if (readable.ptr == .Bytes) { - readable.ptr.Bytes.onData( - .{ .err = this.Error.toStreamError(global) }, - bun.default_allocator, - ); - } else { - readable.abort(global); - } - } - - if (locked.onReceiveValue) |onReceiveValue| { - locked.onReceiveValue = null; - onReceiveValue(locked.task.?, this); - } - return; - } - this.* = .{ .Error = err }; - } - - pub fn toError(this: *Value, err: anyerror, global: *JSGlobalObject) void { - return this.toErrorInstance(.{ .Message = bun.String.createFormat( - "Error reading file {s}", - .{@errorName(err)}, - ) catch bun.outOfMemory() }, global); - } - - pub fn deinit(this: *Value) void { - const tag = @as(Tag, this.*); - if (tag == .Locked) { - if (!this.Locked.deinit) { - this.Locked.deinit = true; - this.Locked.readable.deinit(); - this.Locked.readable = .{}; - } - - return; - } - - if (tag == .InternalBlob) { - this.InternalBlob.clearAndFree(); - this.* = Value{ .Null = {} }; - } - - if (tag == .Blob) { - this.Blob.deinit(); - this.* = Value{ .Null = {} }; - } - - if (tag == .WTFStringImpl) { - this.WTFStringImpl.deref(); - this.* = Value{ .Null = {} }; - } - - if (tag == .Error) { - this.Error.deinit(); - } - } - - pub fn tee(this: *Value, globalThis: *JSC.JSGlobalObject) Value { - var locked = &this.Locked; - - if (locked.readable.isDisturbed(globalThis)) { - return Value{ .Used = {} }; - } - - if (locked.readable.tee(globalThis)) |readable| { - return Value{ - .Locked = .{ - .readable = JSC.WebCore.ReadableStream.Strong.init(readable, globalThis), - .global = globalThis, - }, - }; - } - if (locked.promise != null or locked.action != .none or locked.readable.has()) { - return Value{ .Used = {} }; - } - - var drain_result: JSC.WebCore.DrainResult = .{ - .estimated_size = 0, - }; - - if (locked.onStartStreaming) |drain| { - locked.onStartStreaming = null; - drain_result = drain(locked.task.?); - } - - if (drain_result == .empty or drain_result == .aborted) { - this.* = .{ .Null = {} }; - return Value{ .Null = {} }; - } - - var reader = JSC.WebCore.ByteStream.Source.new(.{ - .context = undefined, - .globalThis = globalThis, - }); - - reader.context.setup(); - - if (drain_result == .estimated_size) { - reader.context.highWaterMark = @as(Blob.SizeType, @truncate(drain_result.estimated_size)); - reader.context.size_hint = @as(Blob.SizeType, @truncate(drain_result.estimated_size)); - } else if (drain_result == .owned) { - reader.context.buffer = drain_result.owned.list; - reader.context.size_hint = @as(Blob.SizeType, @truncate(drain_result.owned.size_hint)); - } - - locked.readable = JSC.WebCore.ReadableStream.Strong.init(.{ - .ptr = .{ .Bytes = &reader.context }, - .value = reader.toReadableStream(globalThis), - }, globalThis); - - if (locked.onReadableStreamAvailable) |onReadableStreamAvailable| { - onReadableStreamAvailable(locked.task.?, globalThis, locked.readable.get(globalThis).?); - } - - const teed = locked.readable.tee(globalThis) orelse return Value{ .Used = {} }; - - return Value{ - .Locked = .{ - .readable = JSC.WebCore.ReadableStream.Strong.init(teed, globalThis), - .global = globalThis, - }, - }; - } - - pub fn clone(this: *Value, globalThis: *JSC.JSGlobalObject) Value { - this.toBlobIfPossible(); - - if (this.* == .Locked) { - return this.tee(globalThis); - } - - if (this.* == .InternalBlob) { - var internal_blob = this.InternalBlob; - this.* = .{ - .Blob = Blob.init( - internal_blob.toOwnedSlice(), - internal_blob.bytes.allocator, - globalThis, - ), - }; - } - - // if (this.* == 
.InlineBlob) { - // return this.*; - // } - - if (this.* == .Blob) { - return Value{ .Blob = this.Blob.dupe() }; - } - - if (this.* == .WTFStringImpl) { - this.WTFStringImpl.ref(); - return Value{ .WTFStringImpl = this.WTFStringImpl }; - } - - if (this.* == .Null) { - return Value{ .Null = {} }; - } - - return Value{ .Empty = {} }; - } - }; - - // https://github.com/WebKit/webkit/blob/main/Source/WebCore/Modules/fetch/FetchBody.cpp#L45 - pub fn extract( - globalThis: *JSGlobalObject, - value: JSValue, - ) bun.JSError!Body { - var body = Body{ .value = Value{ .Null = {} } }; - - body.value = try Value.fromJS(globalThis, value); - if (body.value == .Blob) { - assert(body.value.Blob.allocator == null); // owned by Body - } - return body; - } -}; - -pub fn BodyMixin(comptime Type: type) type { - return struct { - pub fn getText( - this: *Type, - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - var value: *Body.Value = this.getBodyValue(); - if (value.* == .Used) { - return handleBodyAlreadyUsed(globalObject); - } - - if (value.* == .Locked) { - if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { - return handleBodyAlreadyUsed(globalObject); - } - - return value.Locked.setPromise(globalObject, .{ .getText = {} }); - } - - var blob = value.useAsAnyBlobAllowNonUTF8String(); - return JSC.JSPromise.wrap(globalObject, lifetimeWrap(AnyBlob.toString, .transfer), .{ &blob, globalObject }); - } - - pub fn getBody( - this: *Type, - globalThis: *JSC.JSGlobalObject, - ) JSValue { - var body: *Body.Value = this.getBodyValue(); - - if (body.* == .Used) { - return JSC.WebCore.ReadableStream.used(globalThis); - } - - return body.toReadableStream(globalThis); - } - - pub fn getBodyUsed( - this: *Type, - globalObject: *JSC.JSGlobalObject, - ) JSValue { - return JSValue.jsBoolean( - switch (this.getBodyValue().*) { - .Used => true, - .Locked => |*pending| brk: { - if (pending.action != .none) { - break :brk true; - } - - if (pending.readable.get(globalObject)) |*stream| { - break :brk stream.isDisturbed(globalObject); - } - - break :brk false; - }, - else => false, - }, - ); - } - - fn lifetimeWrap(comptime Fn: anytype, comptime lifetime: JSC.WebCore.Lifetime) fn (*AnyBlob, *JSC.JSGlobalObject) JSC.JSValue { - return struct { - fn wrap(this: *AnyBlob, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - return JSC.toJSHostValue(globalObject, Fn(this, globalObject, lifetime)); - } - }.wrap; - } - - pub fn getJSON( - this: *Type, - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - var value: *Body.Value = this.getBodyValue(); - if (value.* == .Used) { - return handleBodyAlreadyUsed(globalObject); - } - - if (value.* == .Locked) { - if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { - return handleBodyAlreadyUsed(globalObject); - } - - value.toBlobIfPossible(); - if (value.* == .Locked) { - return value.Locked.setPromise(globalObject, .{ .getJSON = {} }); - } - } - - var blob = value.useAsAnyBlobAllowNonUTF8String(); - - return JSC.JSPromise.wrap(globalObject, lifetimeWrap(AnyBlob.toJSON, .share), .{ &blob, globalObject }); - } - - fn handleBodyAlreadyUsed(globalObject: *JSC.JSGlobalObject) JSValue { - return globalObject.ERR(.BODY_ALREADY_USED, "Body already used", .{}).reject(); - } - - pub fn getArrayBuffer( - this: *Type, - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - var 
value: *Body.Value = this.getBodyValue(); - - if (value.* == .Used) { - return handleBodyAlreadyUsed(globalObject); - } - - if (value.* == .Locked) { - if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { - return handleBodyAlreadyUsed(globalObject); - } - value.toBlobIfPossible(); - - if (value.* == .Locked) { - return value.Locked.setPromise(globalObject, .{ .getArrayBuffer = {} }); - } - } - - // toArrayBuffer in AnyBlob checks for non-UTF8 strings - var blob: AnyBlob = value.useAsAnyBlobAllowNonUTF8String(); - - return JSC.JSPromise.wrap(globalObject, lifetimeWrap(AnyBlob.toArrayBuffer, .transfer), .{ &blob, globalObject }); - } - - pub fn getBytes( - this: *Type, - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - var value: *Body.Value = this.getBodyValue(); - - if (value.* == .Used) { - return handleBodyAlreadyUsed(globalObject); - } - - if (value.* == .Locked) { - if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { - return handleBodyAlreadyUsed(globalObject); - } - value.toBlobIfPossible(); - if (value.* == .Locked) { - return value.Locked.setPromise(globalObject, .{ .getBytes = {} }); - } - } - - // toArrayBuffer in AnyBlob checks for non-UTF8 strings - var blob: AnyBlob = value.useAsAnyBlobAllowNonUTF8String(); - return JSC.JSPromise.wrap(globalObject, lifetimeWrap(AnyBlob.toUint8Array, .transfer), .{ &blob, globalObject }); - } - - pub fn getFormData( - this: *Type, - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - var value: *Body.Value = this.getBodyValue(); - - if (value.* == .Used) { - return handleBodyAlreadyUsed(globalObject); - } - - if (value.* == .Locked) { - if (value.Locked.action != .none or value.Locked.isDisturbed(Type, globalObject, callframe.this())) { - return handleBodyAlreadyUsed(globalObject); - } - value.toBlobIfPossible(); - } - - var encoder = this.getFormDataEncoding() orelse { - // TODO: catch specific errors from getFormDataEncoding - return globalObject.ERR(.FORMDATA_PARSE_ERROR, "Can't decode form data from body because of incorrect MIME type/boundary", .{}).reject(); - }; - - if (value.* == .Locked) { - return value.Locked.setPromise(globalObject, .{ .getFormData = encoder }); - } - - var blob: AnyBlob = value.useAsAnyBlob(); - defer blob.detach(); - defer encoder.deinit(); - - const js_value = bun.FormData.toJS( - globalObject, - blob.slice(), - encoder.encoding, - ) catch |err| { - return globalObject.ERR( - .FORMDATA_PARSE_ERROR, - "FormData parse error {s}", - .{ - @errorName(err), - }, - ).reject(); - }; - - return JSC.JSPromise.wrapValue( - globalObject, - js_value, - ); - } - - pub fn getBlob( - this: *Type, - globalObject: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - return getBlobWithThisValue(this, globalObject, callframe.this()); - } - - pub fn getBlobWithThisValue( - this: *Type, - globalObject: *JSC.JSGlobalObject, - this_value: JSValue, - ) JSC.JSValue { - var value: *Body.Value = this.getBodyValue(); - - if (value.* == .Used) { - return handleBodyAlreadyUsed(globalObject); - } - - if (value.* == .Locked) { - if (value.Locked.action != .none or - ((this_value != .zero and value.Locked.isDisturbed(Type, globalObject, this_value)) or - (this_value == .zero and value.Locked.readable.isDisturbed(globalObject)))) - { - return handleBodyAlreadyUsed(globalObject); - } - - value.toBlobIfPossible(); - - if (value.* == 
.Locked) { - return value.Locked.setPromise(globalObject, .{ .getBlob = {} }); - } - } - - var blob = Blob.new(value.use()); - blob.allocator = getAllocator(globalObject); - if (blob.content_type.len == 0) { - if (this.getFetchHeaders()) |fetch_headers| { - if (fetch_headers.fastGet(.ContentType)) |content_type| { - var content_slice = content_type.toSlice(blob.allocator.?); - defer content_slice.deinit(); - var allocated = false; - const mimeType = MimeType.init(content_slice.slice(), blob.allocator.?, &allocated); - blob.content_type = mimeType.value; - blob.content_type_allocated = allocated; - blob.content_type_was_set = true; - if (blob.store != null) { - blob.store.?.mime_type = mimeType; - } - } - } - if (!blob.content_type_was_set and blob.store != null) { - blob.content_type = MimeType.text.value; - blob.content_type_allocated = false; - blob.content_type_was_set = true; - blob.store.?.mime_type = MimeType.text; - } - } - return JSC.JSPromise.resolvedPromiseValue(globalObject, blob.toJS(globalObject)); - } - - pub fn getBlobWithoutCallFrame( - this: *Type, - globalObject: *JSC.JSGlobalObject, - ) JSC.JSValue { - return getBlobWithThisValue(this, globalObject, .zero); - } - }; -} - -pub const BodyValueBufferer = struct { - const log = bun.Output.scoped(.BodyValueBufferer, false); - - const ArrayBufferSink = JSC.WebCore.ArrayBufferSink; - const Callback = *const fn (ctx: *anyopaque, bytes: []const u8, err: ?Body.Value.ValueError, is_async: bool) void; - - ctx: *anyopaque, - onFinishedBuffering: Callback, - - js_sink: ?*ArrayBufferSink.JSSink = null, - byte_stream: ?*JSC.WebCore.ByteStream = null, - // readable stream strong ref to keep byte stream alive - readable_stream_ref: JSC.WebCore.ReadableStream.Strong = .{}, - stream_buffer: bun.MutableString, - allocator: std.mem.Allocator, - global: *JSGlobalObject, - - pub fn deinit(this: *@This()) void { - this.stream_buffer.deinit(); - if (this.byte_stream) |byte_stream| { - byte_stream.unpipeWithoutDeref(); - } - this.readable_stream_ref.deinit(); - - if (this.js_sink) |buffer_stream| { - buffer_stream.detach(); - buffer_stream.sink.destroy(); - this.js_sink = null; - } - } - - pub fn init( - ctx: *anyopaque, - onFinish: Callback, - global: *JSGlobalObject, - allocator: std.mem.Allocator, - ) @This() { - const this: BodyValueBufferer = .{ - .ctx = ctx, - .onFinishedBuffering = onFinish, - .allocator = allocator, - .global = global, - .stream_buffer = .{ - .allocator = allocator, - .list = .{ - .items = &.{}, - .capacity = 0, - }, - }, - }; - return this; - } - - pub fn run(sink: *@This(), value: *JSC.WebCore.Body.Value) !void { - value.toBlobIfPossible(); - - switch (value.*) { - .Used => { - log("Used", .{}); - return error.StreamAlreadyUsed; - }, - .Empty, .Null => { - log("Empty", .{}); - return sink.onFinishedBuffering(sink.ctx, "", null, false); - }, - - .Error => |err| { - log("Error", .{}); - sink.onFinishedBuffering(sink.ctx, "", err, false); - return; - }, - // .InlineBlob, - .WTFStringImpl, - .InternalBlob, - .Blob, - => { - // toBlobIfPossible checks for WTFString needing a conversion. 
- var input = value.useAsAnyBlobAllowNonUTF8String(); - const is_pending = input.needsToReadFile(); - defer if (!is_pending) input.detach(); - - if (is_pending) { - input.Blob.doReadFileInternal(*@This(), sink, onFinishedLoadingFile, sink.global); - } else { - const bytes = input.slice(); - log("Blob {}", .{bytes.len}); - sink.onFinishedBuffering(sink.ctx, bytes, null, false); - } - return; - }, - .Locked => { - try sink.bufferLockedBodyValue(value); - }, - } - } - - fn onFinishedLoadingFile(sink: *@This(), bytes: Blob.ReadFileResultType) void { - switch (bytes) { - .err => |err| { - log("onFinishedLoadingFile Error", .{}); - sink.onFinishedBuffering(sink.ctx, "", .{ .SystemError = err }, true); - return; - }, - .result => |data| { - log("onFinishedLoadingFile Data {}", .{data.buf.len}); - sink.onFinishedBuffering(sink.ctx, data.buf, null, true); - if (data.is_temporary) { - bun.default_allocator.free(@constCast(data.buf)); - } - }, - } - } - fn onStreamPipe(sink: *@This(), stream: JSC.WebCore.StreamResult, allocator: std.mem.Allocator) void { - const stream_needs_deinit = stream == .owned or stream == .owned_and_done; - - defer { - if (stream_needs_deinit) { - if (stream == .owned_and_done) { - stream.owned_and_done.listManaged(allocator).deinit(); - } else { - stream.owned.listManaged(allocator).deinit(); - } - } - } - - const chunk = stream.slice(); - log("onStreamPipe chunk {}", .{chunk.len}); - _ = sink.stream_buffer.write(chunk) catch bun.outOfMemory(); - if (stream.isDone()) { - const bytes = sink.stream_buffer.list.items; - log("onStreamPipe done {}", .{bytes.len}); - sink.onFinishedBuffering(sink.ctx, bytes, null, true); - return; - } - } - - pub fn onResolveStream(_: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - var args = callframe.arguments_old(2); - var sink: *@This() = args.ptr[args.len - 1].asPromisePtr(@This()); - sink.handleResolveStream(true); - return JSValue.jsUndefined(); - } - - pub fn onRejectStream(_: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - const args = callframe.arguments_old(2); - var sink = args.ptr[args.len - 1].asPromisePtr(@This()); - const err = args.ptr[0]; - sink.handleRejectStream(err, true); - return JSValue.jsUndefined(); - } - - fn handleRejectStream(sink: *@This(), err: JSValue, is_async: bool) void { - if (sink.js_sink) |wrapper| { - wrapper.detach(); - sink.js_sink = null; - wrapper.sink.destroy(); - } - var ref = JSC.Strong.create(err, sink.global); - defer ref.deinit(); - sink.onFinishedBuffering(sink.ctx, "", .{ .JSValue = ref }, is_async); - } - - fn handleResolveStream(sink: *@This(), is_async: bool) void { - if (sink.js_sink) |wrapper| { - const bytes = wrapper.sink.bytes.slice(); - log("handleResolveStream {}", .{bytes.len}); - sink.onFinishedBuffering(sink.ctx, bytes, null, is_async); - } else { - log("handleResolveStream no sink", .{}); - sink.onFinishedBuffering(sink.ctx, "", null, is_async); - } - } - - fn createJSSink(sink: *@This(), stream: JSC.WebCore.ReadableStream) !void { - stream.value.ensureStillAlive(); - var allocator = sink.allocator; - var buffer_stream = try allocator.create(ArrayBufferSink.JSSink); - var globalThis = sink.global; - buffer_stream.* = ArrayBufferSink.JSSink{ - .sink = ArrayBufferSink{ - .bytes = bun.ByteList.init(&.{}), - .allocator = allocator, - .next = null, - }, - }; - var signal = &buffer_stream.sink.signal; - sink.js_sink = buffer_stream; - - signal.* = ArrayBufferSink.JSSink.SinkSignal.init(JSValue.zero); - - // explicitly set it to a dead 
pointer - // we use this memory address to disable signals being sent - signal.clear(); - assert(signal.isDead()); - - const assignment_result: JSValue = ArrayBufferSink.JSSink.assignToStream( - globalThis, - stream.value, - buffer_stream, - @as(**anyopaque, @ptrCast(&signal.ptr)), - ); - - assignment_result.ensureStillAlive(); - - // assert that it was updated - assert(!signal.isDead()); - - if (assignment_result.isError()) { - return error.PipeFailed; - } - - if (!assignment_result.isEmptyOrUndefinedOrNull()) { - assignment_result.ensureStillAlive(); - // it returns a Promise when it goes through ReadableStreamDefaultReader - if (assignment_result.asAnyPromise()) |promise| { - switch (promise.status(globalThis.vm())) { - .Pending => { - assignment_result.then( - globalThis, - sink, - onResolveStream, - onRejectStream, - ); - }, - .Fulfilled => { - defer stream.value.unprotect(); - - sink.handleResolveStream(false); - }, - .Rejected => { - defer stream.value.unprotect(); - - sink.handleRejectStream(promise.result(globalThis.vm()), false); - }, - } - return; - } - } - - return error.PipeFailed; - } - - fn bufferLockedBodyValue(sink: *@This(), value: *JSC.WebCore.Body.Value) !void { - assert(value.* == .Locked); - const locked = &value.Locked; - if (locked.readable.get(sink.global)) |stream| { - // keep the stream alive until we're done with it - sink.readable_stream_ref = locked.readable; - value.* = .{ .Used = {} }; - - if (stream.isLocked(sink.global)) { - return error.StreamAlreadyUsed; - } - - switch (stream.ptr) { - .Invalid => { - return error.InvalidStream; - }, - // toBlobIfPossible should've caught this - .Blob, .File => unreachable, - .JavaScript, .Direct => { - // this is broken right now - // return sink.createJSSink(stream); - return error.UnsupportedStreamType; - }, - .Bytes => |byte_stream| { - assert(byte_stream.pipe.ctx == null); - assert(sink.byte_stream == null); - - const bytes = byte_stream.buffer.items; - // If we've received the complete body by the time this function is called - // we can avoid streaming it and just send it all at once. 
- if (byte_stream.has_received_last_chunk) { - log("byte stream has_received_last_chunk {}", .{bytes.len}); - sink.onFinishedBuffering(sink.ctx, bytes, null, false); - // is safe to detach here because we're not going to receive any more data - stream.done(sink.global); - return; - } - - byte_stream.pipe = JSC.WebCore.Pipe.New(@This(), onStreamPipe).init(sink); - sink.byte_stream = byte_stream; - log("byte stream pre-buffered {}", .{bytes.len}); - - _ = sink.stream_buffer.write(bytes) catch bun.outOfMemory(); - return; - }, - } - } - - if (locked.onReceiveValue != null or locked.task != null) { - // someone else is waiting for the stream or waiting for `onStartStreaming` - const readable = value.toReadableStream(sink.global); - readable.ensureStillAlive(); - readable.protect(); - return try sink.bufferLockedBodyValue(value); - } - // is safe to wait it buffer - locked.task = @ptrCast(sink); - locked.onReceiveValue = @This().onReceiveValue; - } - - fn onReceiveValue(ctx: *anyopaque, value: *JSC.WebCore.Body.Value) void { - const sink = bun.cast(*@This(), ctx); - switch (value.*) { - .Error => |err| { - log("onReceiveValue Error", .{}); - sink.onFinishedBuffering(sink.ctx, "", err, true); - return; - }, - else => { - value.toBlobIfPossible(); - var input = value.useAsAnyBlobAllowNonUTF8String(); - const bytes = input.slice(); - log("onReceiveValue {}", .{bytes.len}); - sink.onFinishedBuffering(sink.ctx, bytes, null, true); - }, - } - } - - comptime { - const jsonResolveStream = JSC.toJSHostFunction(onResolveStream); - @export(&jsonResolveStream, .{ .name = "Bun__BodyValueBufferer__onResolveStream" }); - const jsonRejectStream = JSC.toJSHostFunction(onRejectStream); - @export(&jsonRejectStream, .{ .name = "Bun__BodyValueBufferer__onRejectStream" }); - } -}; - -const assert = bun.assert; diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig index e2077779cb..705380fdeb 100644 --- a/src/bun.js/webcore/encoding.zig +++ b/src/bun.js/webcore/encoding.zig @@ -1,550 +1,531 @@ +//! Contains helpers for C++ to do TextEncoder/Decoder like operations. +//! 
Also contains the code used by `bun.String.encode` and `bun.String.encodeInto` + +export fn Bun__encoding__writeLatin1(input: [*]const u8, len: usize, to: [*]u8, to_len: usize, encoding: u8) usize { + return switch (@as(Encoding, @enumFromInt(encoding))) { + .utf8 => writeU8(input, len, to, to_len, .utf8), + .latin1 => writeU8(input, len, to, to_len, .latin1), + .ascii => writeU8(input, len, to, to_len, .ascii), + .ucs2 => writeU8(input, len, to, to_len, .utf16le), + .utf16le => writeU8(input, len, to, to_len, .utf16le), + .base64 => writeU8(input, len, to, to_len, .base64), + .base64url => writeU8(input, len, to, to_len, .base64url), + .hex => writeU8(input, len, to, to_len, .hex), + else => unreachable, + } catch 0; +} + +export fn Bun__encoding__writeUTF16(input: [*]const u16, len: usize, to: [*]u8, to_len: usize, encoding: u8) usize { + return switch (@as(Encoding, @enumFromInt(encoding))) { + .utf8 => writeU16(input, len, to, to_len, .utf8, false), + .latin1 => writeU16(input, len, to, to_len, .ascii, false), + .ascii => writeU16(input, len, to, to_len, .ascii, false), + .ucs2 => writeU16(input, len, to, to_len, .utf16le, false), + .utf16le => writeU16(input, len, to, to_len, .utf16le, false), + .base64 => writeU16(input, len, to, to_len, .base64, false), + .base64url => writeU16(input, len, to, to_len, .base64url, false), + .hex => writeU16(input, len, to, to_len, .hex, false), + else => unreachable, + } catch 0; +} + +// TODO(@190n) handle unpaired surrogates +export fn Bun__encoding__byteLengthLatin1AsUTF8(input: [*]const u8, len: usize) usize { + return byteLengthU8(input, len, .utf8); +} + +// TODO(@190n) handle unpaired surrogates +export fn Bun__encoding__byteLengthUTF16AsUTF8(input: [*]const u16, len: usize) usize { + return strings.elementLengthUTF16IntoUTF8([]const u16, input[0..len]); +} + +export fn Bun__encoding__constructFromLatin1(globalObject: *JSGlobalObject, input: [*]const u8, len: usize, encoding: u8) JSValue { + const slice = switch (@as(Encoding, @enumFromInt(encoding))) { + .hex => constructFromU8(input, len, bun.default_allocator, .hex), + .ascii => constructFromU8(input, len, bun.default_allocator, .ascii), + .base64url => constructFromU8(input, len, bun.default_allocator, .base64url), + .utf16le => constructFromU8(input, len, bun.default_allocator, .utf16le), + .ucs2 => constructFromU8(input, len, bun.default_allocator, .utf16le), + .utf8 => constructFromU8(input, len, bun.default_allocator, .utf8), + .base64 => constructFromU8(input, len, bun.default_allocator, .base64), + else => unreachable, + }; + return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator); +} + +export fn Bun__encoding__constructFromUTF16(globalObject: *JSGlobalObject, input: [*]const u16, len: usize, encoding: u8) JSValue { + const slice = switch (@as(Encoding, @enumFromInt(encoding))) { + .base64 => constructFromU16(input, len, bun.default_allocator, .base64), + .hex => constructFromU16(input, len, bun.default_allocator, .hex), + .base64url => constructFromU16(input, len, bun.default_allocator, .base64url), + .utf16le => constructFromU16(input, len, bun.default_allocator, .utf16le), + .ucs2 => constructFromU16(input, len, bun.default_allocator, .utf16le), + .utf8 => constructFromU16(input, len, bun.default_allocator, .utf8), + .ascii => constructFromU16(input, len, bun.default_allocator, .ascii), + .latin1 => constructFromU16(input, len, bun.default_allocator, .latin1), + else => unreachable, + }; + return JSC.JSValue.createBuffer(globalObject, slice, 
globalObject.bunVM().allocator); +} + +// for SQL statement +export fn Bun__encoding__toStringUTF8(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue { + return toStringComptime(input[0..len], globalObject, .utf8); +} + +export fn Bun__encoding__toString(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject, encoding: u8) JSValue { + return toString(input[0..len], globalObject, @enumFromInt(encoding)); +} + +// pub fn writeUTF16AsUTF8(utf16: [*]const u16, len: usize, to: [*]u8, to_len: usize) callconv(.C) i32 { +// return @intCast(i32, strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, utf16[0..len], true).written); +// } +pub fn toString(input: []const u8, globalObject: *JSGlobalObject, encoding: Encoding) JSValue { + return switch (encoding) { + // treat buffer as utf8 + // callers are expected to check that before constructing `Buffer` objects + .buffer, .utf8 => toStringComptime(input, globalObject, .utf8), + + inline else => |enc| toStringComptime(input, globalObject, enc), + }; +} + +pub fn toBunStringFromOwnedSlice(input: []u8, encoding: Encoding) bun.String { + if (input.len == 0) + return bun.String.empty; + + switch (encoding) { + .ascii => { + if (strings.isAllASCII(input)) { + return bun.String.createExternalGloballyAllocated(.latin1, input); + } + + const str, const chars = bun.String.createUninitialized(.latin1, input.len); + defer bun.default_allocator.free(input); + if (str.tag == .Dead) { + return str; + } + strings.copyLatin1IntoASCII(chars, input); + return str; + }, + .latin1 => { + return bun.String.createExternalGloballyAllocated(.latin1, input); + }, + .buffer, .utf8 => { + const converted = strings.toUTF16Alloc(bun.default_allocator, input, false, false) catch { + bun.default_allocator.free(input); + return bun.String.dead; + }; + + if (converted) |utf16| { + defer bun.default_allocator.free(input); + return bun.String.createExternalGloballyAllocated(.utf16, utf16); + } + + // If we get here, it means we can safely assume the string is 100% ASCII characters + return bun.String.createExternalGloballyAllocated(.latin1, input); + }, + .ucs2, .utf16le => { + // Avoid incomplete characters + if (input.len / 2 == 0) { + bun.default_allocator.free(input); + return bun.String.empty; + } + + const as_u16 = std.mem.bytesAsSlice(u16, input); + return bun.String.createExternalGloballyAllocated(.utf16, @alignCast(as_u16)); + }, + + .hex => { + defer bun.default_allocator.free(input); + const str, const chars = bun.String.createUninitialized(.latin1, input.len * 2); + + if (str.tag == .Dead) { + return str; + } + + const wrote = strings.encodeBytesToHex(chars, input); + + // Return an empty string in this case, just like node. + if (wrote < chars.len) { + str.deref(); + return bun.String.empty; + } + + return str; + }, + + // TODO: this is not right. There is an issue here. But it needs to + // be addressed separately because constructFromU8's base64url also + // appears inconsistent with Node.js. 
+ .base64url => { + defer bun.default_allocator.free(input); + const out, const chars = bun.String.createUninitialized(.latin1, bun.base64.urlSafeEncodeLen(input)); + if (out.tag != .Dead) { + _ = bun.base64.encodeURLSafe(chars, input); + } + return out; + }, + + .base64 => { + defer bun.default_allocator.free(input); + const to_len = bun.base64.encodeLen(input); + const to = bun.default_allocator.alloc(u8, to_len) catch return bun.String.dead; + const wrote = bun.base64.encode(to, input); + return bun.String.createExternalGloballyAllocated(.latin1, to[0..wrote]); + }, + } +} + +pub fn toStringComptime(input: []const u8, global: *JSGlobalObject, comptime encoding: Encoding) JSValue { + var bun_string = toBunStringComptime(input, encoding); + return bun_string.transferToJS(global); +} + +pub fn toBunString(input: []const u8, encoding: Encoding) bun.String { + return switch (encoding) { + inline else => |enc| toBunStringComptime(input, enc), + }; +} + +pub fn toBunStringComptime(input: []const u8, comptime encoding: Encoding) bun.String { + if (input.len == 0) + return bun.String.empty; + + switch (comptime encoding) { + .ascii => { + const str, const chars = bun.String.createUninitialized(.latin1, input.len); + strings.copyLatin1IntoASCII(chars, input); + return str; + }, + .latin1 => { + const str, const chars = bun.String.createUninitialized(.latin1, input.len); + @memcpy(chars, input); + return str; + }, + .buffer, .utf8 => { + const converted = strings.toUTF16Alloc(bun.default_allocator, input, false, false) catch return bun.String.dead; + if (converted) |utf16| { + return bun.String.createExternalGloballyAllocated(.utf16, utf16); + } + + // If we get here, it means we can safely assume the string is 100% ASCII characters + // For this, we rely on WebKit to manage the memory. 
+ return bun.String.createLatin1(input); + }, + .ucs2, .utf16le => { + // Avoid incomplete characters + if (input.len / 2 == 0) return bun.String.empty; + + const str, const chars = bun.String.createUninitialized(.utf16, input.len / 2); + var output_bytes = std.mem.sliceAsBytes(chars); + output_bytes[output_bytes.len - 1] = 0; + + @memcpy(output_bytes, input[0..output_bytes.len]); + return str; + }, + + .hex => { + const str, const chars = bun.String.createUninitialized(.latin1, input.len * 2); + + const wrote = strings.encodeBytesToHex(chars, input); + bun.assert(wrote == chars.len); + return str; + }, + + .base64url => { + const to_len = bun.base64.urlSafeEncodeLen(input); + const to = bun.default_allocator.alloc(u8, to_len) catch return bun.String.dead; + const wrote = bun.base64.encodeURLSafe(to, input); + return bun.String.createExternalGloballyAllocated(.latin1, to[0..wrote]); + }, + + .base64 => { + const to_len = bun.base64.encodeLen(input); + const to = bun.default_allocator.alloc(u8, to_len) catch return bun.String.dead; + const wrote = bun.base64.encode(to, input); + return bun.String.createExternalGloballyAllocated(.latin1, to[0..wrote]); + }, + } +} + +pub fn writeU8(input: [*]const u8, len: usize, to_ptr: [*]u8, to_len: usize, comptime encoding: Encoding) !usize { + if (len == 0 or to_len == 0) + return 0; + + // TODO: increase temporary buffer size for larger amounts of data + // defer { + // if (comptime encoding.isBinaryToText()) {} + // } + + // if (comptime encoding.isBinaryToText()) {} + + switch (comptime encoding) { + .buffer, .latin1 => { + const written = @min(len, to_len); + @memcpy(to_ptr[0..written], input[0..written]); + + return written; + }, + .ascii => { + const written = @min(len, to_len); + + const to = to_ptr[0..written]; + var remain = input[0..written]; + + if (bun.simdutf.validate.ascii(remain)) { + @memcpy(to_ptr[0..written], remain[0..written]); + } else { + strings.copyLatin1IntoASCII(to, remain); + } + + return written; + }, + .utf8 => { + // need to encode + return strings.copyLatin1IntoUTF8(to_ptr[0..to_len], []const u8, input[0..len]).written; + }, + // encode latin1 into UTF16 + .ucs2, .utf16le => { + if (to_len < 2) + return 0; + + if (std.mem.isAligned(@intFromPtr(to_ptr), @alignOf([*]u16))) { + const buf = input[0..len]; + + const output = @as([*]u16, @ptrCast(@alignCast(to_ptr)))[0 .. to_len / 2]; + const written = strings.copyLatin1IntoUTF16([]u16, output, []const u8, buf).written; + return written * 2; + } else { + const buf = input[0..len]; + const output = @as([*]align(1) u16, @ptrCast(to_ptr))[0 .. 
to_len / 2]; + + const written = strings.copyLatin1IntoUTF16([]align(1) u16, output, []const u8, buf).written; + return written * 2; + } + }, + + .hex => { + return strings.decodeHexToBytesTruncate(to_ptr[0..to_len], u8, input[0..len]); + }, + + .base64, .base64url => { + return bun.base64.decode(to_ptr[0..to_len], input[0..len]).count; + }, + } +} + +pub fn byteLengthU8(input: [*]const u8, len: usize, comptime encoding: Encoding) usize { + if (len == 0) + return 0; + + switch (comptime encoding) { + .utf8 => { + return strings.elementLengthLatin1IntoUTF8(input[0..len]); + }, + + .latin1, .ascii, .buffer => { + return len; + }, + + .ucs2, .utf16le => { + return strings.elementLengthUTF8IntoUTF16([]const u8, input[0..len]) * 2; + }, + + .hex => { + return len / 2; + }, + + .base64, .base64url => { + return bun.base64.decodeLen(input[0..len]); + }, + // else => return &[_]u8{}; + } +} + +pub fn encodeIntoFrom16(input: []const u16, to: []u8, comptime encoding: Encoding, comptime allow_partial_write: bool) !usize { + return writeU16(input.ptr, input.len, to.ptr, to.len, encoding, allow_partial_write); +} + +pub fn encodeIntoFrom8(input: []const u8, to: []u8, comptime encoding: Encoding) !usize { + return writeU8(input.ptr, input.len, to.ptr, to.len, encoding); +} + +pub fn writeU16(input: [*]const u16, len: usize, to: [*]u8, to_len: usize, comptime encoding: Encoding, comptime allow_partial_write: bool) !usize { + if (len == 0) + return 0; + + switch (comptime encoding) { + .utf8 => { + return strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, input[0..len], allow_partial_write).written; + }, + .latin1, .ascii, .buffer => { + const out = @min(len, to_len); + strings.copyU16IntoU8(to[0..to_len], input[0..out]); + return out; + }, + // string is already encoded, just need to copy the data + .ucs2, .utf16le => { + if (allow_partial_write) { + const bytes_input_len = len * 2; + const written = @min(bytes_input_len, to_len); + const input_u8 = @as([*]const u8, @ptrCast(input)); + bun.memmove(to[0..written], input_u8[0..written]); + return written; + } else { + const bytes_input_len = len * 2; + const written = @min(bytes_input_len, to_len); + if (written < 2) return 0; + + const fixed_len = (written / 2) * 2; + const input_u8 = @as([*]const u8, @ptrCast(input)); + bun.memmove(to[0..written], input_u8[0..fixed_len]); + return fixed_len; + } + }, + + .hex => { + return strings.decodeHexToBytesTruncate(to[0..to_len], u16, input[0..len]); + }, + + .base64, .base64url => { + if (to_len < 2 or len == 0) + return 0; + + // very very slow case! 
+ // shouldn't really happen though + const transcoded = strings.toUTF8Alloc(bun.default_allocator, input[0..len]) catch return 0; + defer bun.default_allocator.free(transcoded); + return writeU8(transcoded.ptr, transcoded.len, to, to_len, encoding); + }, + // else => return &[_]u8{}; + } +} + +pub fn constructFrom(comptime T: type, input: []const T, allocator: std.mem.Allocator, comptime encoding: Encoding) []u8 { + return switch (comptime T) { + u16 => constructFromU16(input.ptr, input.len, allocator, encoding), + u8 => constructFromU8(input.ptr, input.len, allocator, encoding), + else => @compileError("Unsupported type for constructFrom: " ++ @typeName(T)), + }; +} + +pub fn constructFromU8(input: [*]const u8, len: usize, allocator: std.mem.Allocator, comptime encoding: Encoding) []u8 { + if (len == 0) return &[_]u8{}; + + switch (comptime encoding) { + .buffer => { + var to = allocator.alloc(u8, len) catch return &[_]u8{}; + @memcpy(to[0..len], input[0..len]); + return to; + }, + .latin1, .ascii => { + var to = allocator.alloc(u8, len) catch return &[_]u8{}; + @memcpy(to[0..len], input[0..len]); + return to; + }, + .utf8 => { + // need to encode + return strings.allocateLatin1IntoUTF8(allocator, []const u8, input[0..len]) catch return &[_]u8{}; + }, + // encode latin1 into UTF16 + // return as bytes + .ucs2, .utf16le => { + var to = allocator.alloc(u16, len) catch return &[_]u8{}; + _ = strings.copyLatin1IntoUTF16([]u16, to, []const u8, input[0..len]); + return std.mem.sliceAsBytes(to[0..len]); + }, + + .hex => { + if (len < 2) + return &[_]u8{}; + + var to = allocator.alloc(u8, len / 2) catch return &[_]u8{}; + return to[0..strings.decodeHexToBytesTruncate(to, u8, input[0..len])]; + }, + + .base64, .base64url => { + const slice = strings.trim(input[0..len], "\r\n\t " ++ [_]u8{std.ascii.control_code.vt}); + if (slice.len == 0) return &[_]u8{}; + + const outlen = bun.base64.decodeLen(slice); + const to = allocator.alloc(u8, outlen) catch return &[_]u8{}; + + const wrote = bun.base64.decode(to[0..outlen], slice).count; + return to[0..wrote]; + }, + } +} + +pub fn constructFromU16(input: [*]const u16, len: usize, allocator: std.mem.Allocator, comptime encoding: Encoding) []u8 { + if (len == 0) return &[_]u8{}; + + switch (comptime encoding) { + .utf8 => { + return strings.toUTF8AllocWithType(allocator, []const u16, input[0..len]) catch return &[_]u8{}; + }, + .latin1, .buffer, .ascii => { + var to = allocator.alloc(u8, len) catch return &[_]u8{}; + strings.copyU16IntoU8(to[0..len], input[0..len]); + return to; + }, + // string is already encoded, just need to copy the data + .ucs2, .utf16le => { + var to = std.mem.sliceAsBytes(allocator.alloc(u16, len) catch return &[_]u8{}); + const bytes = std.mem.sliceAsBytes(input[0..len]); + @memcpy(to[0..bytes.len], bytes); + return to; + }, + + .hex => { + var to = allocator.alloc(u8, len * 2) catch return &[_]u8{}; + return to[0..strings.decodeHexToBytesTruncate(to, u16, input[0..len])]; + }, + + .base64, .base64url => { + // very very slow case! 
+ // shouldn't really happen though + const transcoded = strings.toUTF8Alloc(allocator, input[0..len]) catch return &[_]u8{}; + defer allocator.free(transcoded); + return constructFromU8(transcoded.ptr, transcoded.len, allocator, encoding); + }, + } +} + +comptime { + _ = &Bun__encoding__writeLatin1; + _ = &Bun__encoding__writeUTF16; + _ = &Bun__encoding__byteLengthLatin1AsUTF8; + _ = &Bun__encoding__byteLengthUTF16AsUTF8; + _ = &Bun__encoding__toString; + _ = &Bun__encoding__toStringUTF8; + _ = &Bun__encoding__constructFromLatin1; + _ = &Bun__encoding__constructFromUTF16; +} + const std = @import("std"); const Api = @import("../../api/schema.zig").Api; const MimeType = bun.http.MimeType; const ZigURL = @import("../../url.zig").URL; const HTTPClient = bun.http; - const JSC = bun.JSC; - -const Method = @import("../../http/method.zig").Method; - -const ObjectPool = @import("../../pool.zig").ObjectPool; +const Encoding = JSC.Node.Encoding; +const Method = bun.http.Method; +const ObjectPool = bun.ObjectPool; const bun = @import("bun"); const Output = bun.Output; const MutableString = bun.MutableString; const strings = bun.strings; const string = bun.string; const FeatureFlags = bun.FeatureFlags; -const ArrayBuffer = @import("../base.zig").ArrayBuffer; +const ArrayBuffer = JSC.ArrayBuffer; const JSUint8Array = JSC.JSUint8Array; -const Properties = @import("../base.zig").Properties; - -const castObj = @import("../base.zig").castObj; -const getAllocator = @import("../base.zig").getAllocator; - -const Environment = @import("../../env.zig"); +const Environment = bun.Environment; const ZigString = JSC.ZigString; const JSInternalPromise = JSC.JSInternalPromise; const JSPromise = JSC.JSPromise; const JSValue = JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; - const VirtualMachine = JSC.VirtualMachine; const Task = JSC.Task; - const picohttp = bun.picohttp; - -pub const TextEncoder = @import("./TextEncoder.zig"); -pub const EncodingLabel = @import("./EncodingLabel.zig").EncodingLabel; -pub const TextEncoderStreamEncoder = @import("./TextEncoderStreamEncoder.zig"); -pub const TextDecoder = @import("./TextDecoder.zig"); - -pub const Encoder = struct { - export fn Bun__encoding__writeLatin1(input: [*]const u8, len: usize, to: [*]u8, to_len: usize, encoding: u8) usize { - return switch (@as(JSC.Node.Encoding, @enumFromInt(encoding))) { - .utf8 => writeU8(input, len, to, to_len, .utf8), - .latin1 => writeU8(input, len, to, to_len, .latin1), - .ascii => writeU8(input, len, to, to_len, .ascii), - .ucs2 => writeU8(input, len, to, to_len, .utf16le), - .utf16le => writeU8(input, len, to, to_len, .utf16le), - .base64 => writeU8(input, len, to, to_len, .base64), - .base64url => writeU8(input, len, to, to_len, .base64url), - .hex => writeU8(input, len, to, to_len, .hex), - else => unreachable, - } catch 0; - } - export fn Bun__encoding__writeUTF16(input: [*]const u16, len: usize, to: [*]u8, to_len: usize, encoding: u8) usize { - return switch (@as(JSC.Node.Encoding, @enumFromInt(encoding))) { - .utf8 => writeU16(input, len, to, to_len, .utf8, false), - .latin1 => writeU16(input, len, to, to_len, .ascii, false), - .ascii => writeU16(input, len, to, to_len, .ascii, false), - .ucs2 => writeU16(input, len, to, to_len, .utf16le, false), - .utf16le => writeU16(input, len, to, to_len, .utf16le, false), - .base64 => writeU16(input, len, to, to_len, .base64, false), - .base64url => writeU16(input, len, to, to_len, .base64url, false), - .hex => writeU16(input, len, to, to_len, .hex, false), - else => unreachable, - } 
catch 0; - } - // TODO(@190n) handle unpaired surrogates - export fn Bun__encoding__byteLengthLatin1AsUTF8(input: [*]const u8, len: usize) usize { - return byteLengthU8(input, len, .utf8); - } - // TODO(@190n) handle unpaired surrogates - export fn Bun__encoding__byteLengthUTF16AsUTF8(input: [*]const u16, len: usize) usize { - return strings.elementLengthUTF16IntoUTF8([]const u16, input[0..len]); - } - export fn Bun__encoding__constructFromLatin1(globalObject: *JSGlobalObject, input: [*]const u8, len: usize, encoding: u8) JSValue { - const slice = switch (@as(JSC.Node.Encoding, @enumFromInt(encoding))) { - .hex => constructFromU8(input, len, bun.default_allocator, .hex), - .ascii => constructFromU8(input, len, bun.default_allocator, .ascii), - .base64url => constructFromU8(input, len, bun.default_allocator, .base64url), - .utf16le => constructFromU8(input, len, bun.default_allocator, .utf16le), - .ucs2 => constructFromU8(input, len, bun.default_allocator, .utf16le), - .utf8 => constructFromU8(input, len, bun.default_allocator, .utf8), - .base64 => constructFromU8(input, len, bun.default_allocator, .base64), - else => unreachable, - }; - return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator); - } - export fn Bun__encoding__constructFromUTF16(globalObject: *JSGlobalObject, input: [*]const u16, len: usize, encoding: u8) JSValue { - const slice = switch (@as(JSC.Node.Encoding, @enumFromInt(encoding))) { - .base64 => constructFromU16(input, len, bun.default_allocator, .base64), - .hex => constructFromU16(input, len, bun.default_allocator, .hex), - .base64url => constructFromU16(input, len, bun.default_allocator, .base64url), - .utf16le => constructFromU16(input, len, bun.default_allocator, .utf16le), - .ucs2 => constructFromU16(input, len, bun.default_allocator, .utf16le), - .utf8 => constructFromU16(input, len, bun.default_allocator, .utf8), - .ascii => constructFromU16(input, len, bun.default_allocator, .ascii), - .latin1 => constructFromU16(input, len, bun.default_allocator, .latin1), - else => unreachable, - }; - return JSC.JSValue.createBuffer(globalObject, slice, globalObject.bunVM().allocator); - } - - // for SQL statement - export fn Bun__encoding__toStringUTF8(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject) JSValue { - return toStringComptime(input[0..len], globalObject, .utf8); - } - - export fn Bun__encoding__toString(input: [*]const u8, len: usize, globalObject: *JSC.JSGlobalObject, encoding: u8) JSValue { - return toString(input[0..len], globalObject, @enumFromInt(encoding)); - } - - // pub fn writeUTF16AsUTF8(utf16: [*]const u16, len: usize, to: [*]u8, to_len: usize) callconv(.C) i32 { - // return @intCast(i32, strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, utf16[0..len], true).written); - // } - pub fn toString(input: []const u8, globalObject: *JSGlobalObject, encoding: JSC.Node.Encoding) JSValue { - return switch (encoding) { - // treat buffer as utf8 - // callers are expected to check that before constructing `Buffer` objects - .buffer, .utf8 => toStringComptime(input, globalObject, .utf8), - - inline else => |enc| toStringComptime(input, globalObject, enc), - }; - } - - pub fn toBunStringFromOwnedSlice(input: []u8, encoding: JSC.Node.Encoding) bun.String { - if (input.len == 0) - return bun.String.empty; - - switch (encoding) { - .ascii => { - if (strings.isAllASCII(input)) { - return bun.String.createExternalGloballyAllocated(.latin1, input); - } - - const str, const chars = bun.String.createUninitialized(.latin1, 
input.len); - defer bun.default_allocator.free(input); - if (str.tag == .Dead) { - return str; - } - strings.copyLatin1IntoASCII(chars, input); - return str; - }, - .latin1 => { - return bun.String.createExternalGloballyAllocated(.latin1, input); - }, - .buffer, .utf8 => { - const converted = strings.toUTF16Alloc(bun.default_allocator, input, false, false) catch { - bun.default_allocator.free(input); - return bun.String.dead; - }; - - if (converted) |utf16| { - defer bun.default_allocator.free(input); - return bun.String.createExternalGloballyAllocated(.utf16, utf16); - } - - // If we get here, it means we can safely assume the string is 100% ASCII characters - return bun.String.createExternalGloballyAllocated(.latin1, input); - }, - .ucs2, .utf16le => { - // Avoid incomplete characters - if (input.len / 2 == 0) { - bun.default_allocator.free(input); - return bun.String.empty; - } - - const as_u16 = std.mem.bytesAsSlice(u16, input); - return bun.String.createExternalGloballyAllocated(.utf16, @alignCast(as_u16)); - }, - - .hex => { - defer bun.default_allocator.free(input); - const str, const chars = bun.String.createUninitialized(.latin1, input.len * 2); - - if (str.tag == .Dead) { - return str; - } - - const wrote = strings.encodeBytesToHex(chars, input); - - // Return an empty string in this case, just like node. - if (wrote < chars.len) { - str.deref(); - return bun.String.empty; - } - - return str; - }, - - // TODO: this is not right. There is an issue here. But it needs to - // be addressed separately because constructFromU8's base64url also - // appears inconsistent with Node.js. - .base64url => { - defer bun.default_allocator.free(input); - const out, const chars = bun.String.createUninitialized(.latin1, bun.base64.urlSafeEncodeLen(input)); - if (out.tag != .Dead) { - _ = bun.base64.encodeURLSafe(chars, input); - } - return out; - }, - - .base64 => { - defer bun.default_allocator.free(input); - const to_len = bun.base64.encodeLen(input); - const to = bun.default_allocator.alloc(u8, to_len) catch return bun.String.dead; - const wrote = bun.base64.encode(to, input); - return bun.String.createExternalGloballyAllocated(.latin1, to[0..wrote]); - }, - } - } - - pub fn toStringComptime(input: []const u8, global: *JSGlobalObject, comptime encoding: JSC.Node.Encoding) JSValue { - var bun_string = toBunStringComptime(input, encoding); - return bun_string.transferToJS(global); - } - - pub fn toBunString(input: []const u8, encoding: JSC.Node.Encoding) bun.String { - return switch (encoding) { - inline else => |enc| toBunStringComptime(input, enc), - }; - } - - pub fn toBunStringComptime(input: []const u8, comptime encoding: JSC.Node.Encoding) bun.String { - if (input.len == 0) - return bun.String.empty; - - switch (comptime encoding) { - .ascii => { - const str, const chars = bun.String.createUninitialized(.latin1, input.len); - strings.copyLatin1IntoASCII(chars, input); - return str; - }, - .latin1 => { - const str, const chars = bun.String.createUninitialized(.latin1, input.len); - @memcpy(chars, input); - return str; - }, - .buffer, .utf8 => { - const converted = strings.toUTF16Alloc(bun.default_allocator, input, false, false) catch return bun.String.dead; - if (converted) |utf16| { - return bun.String.createExternalGloballyAllocated(.utf16, utf16); - } - - // If we get here, it means we can safely assume the string is 100% ASCII characters - // For this, we rely on WebKit to manage the memory. 
- return bun.String.createLatin1(input); - }, - .ucs2, .utf16le => { - // Avoid incomplete characters - if (input.len / 2 == 0) return bun.String.empty; - - const str, const chars = bun.String.createUninitialized(.utf16, input.len / 2); - var output_bytes = std.mem.sliceAsBytes(chars); - output_bytes[output_bytes.len - 1] = 0; - - @memcpy(output_bytes, input[0..output_bytes.len]); - return str; - }, - - .hex => { - const str, const chars = bun.String.createUninitialized(.latin1, input.len * 2); - - const wrote = strings.encodeBytesToHex(chars, input); - bun.assert(wrote == chars.len); - return str; - }, - - .base64url => { - const to_len = bun.base64.urlSafeEncodeLen(input); - const to = bun.default_allocator.alloc(u8, to_len) catch return bun.String.dead; - const wrote = bun.base64.encodeURLSafe(to, input); - return bun.String.createExternalGloballyAllocated(.latin1, to[0..wrote]); - }, - - .base64 => { - const to_len = bun.base64.encodeLen(input); - const to = bun.default_allocator.alloc(u8, to_len) catch return bun.String.dead; - const wrote = bun.base64.encode(to, input); - return bun.String.createExternalGloballyAllocated(.latin1, to[0..wrote]); - }, - } - } - - pub fn writeU8(input: [*]const u8, len: usize, to_ptr: [*]u8, to_len: usize, comptime encoding: JSC.Node.Encoding) !usize { - if (len == 0 or to_len == 0) - return 0; - - // TODO: increase temporary buffer size for larger amounts of data - // defer { - // if (comptime encoding.isBinaryToText()) {} - // } - - // if (comptime encoding.isBinaryToText()) {} - - switch (comptime encoding) { - .buffer, .latin1 => { - const written = @min(len, to_len); - @memcpy(to_ptr[0..written], input[0..written]); - - return written; - }, - .ascii => { - const written = @min(len, to_len); - - const to = to_ptr[0..written]; - var remain = input[0..written]; - - if (bun.simdutf.validate.ascii(remain)) { - @memcpy(to_ptr[0..written], remain[0..written]); - } else { - strings.copyLatin1IntoASCII(to, remain); - } - - return written; - }, - .utf8 => { - // need to encode - return strings.copyLatin1IntoUTF8(to_ptr[0..to_len], []const u8, input[0..len]).written; - }, - // encode latin1 into UTF16 - .ucs2, .utf16le => { - if (to_len < 2) - return 0; - - if (std.mem.isAligned(@intFromPtr(to_ptr), @alignOf([*]u16))) { - const buf = input[0..len]; - - const output = @as([*]u16, @ptrCast(@alignCast(to_ptr)))[0 .. to_len / 2]; - const written = strings.copyLatin1IntoUTF16([]u16, output, []const u8, buf).written; - return written * 2; - } else { - const buf = input[0..len]; - const output = @as([*]align(1) u16, @ptrCast(to_ptr))[0 .. 
to_len / 2]; - - const written = strings.copyLatin1IntoUTF16([]align(1) u16, output, []const u8, buf).written; - return written * 2; - } - }, - - .hex => { - return strings.decodeHexToBytesTruncate(to_ptr[0..to_len], u8, input[0..len]); - }, - - .base64, .base64url => { - return bun.base64.decode(to_ptr[0..to_len], input[0..len]).count; - }, - } - } - - pub fn byteLengthU8(input: [*]const u8, len: usize, comptime encoding: JSC.Node.Encoding) usize { - if (len == 0) - return 0; - - switch (comptime encoding) { - .utf8 => { - return strings.elementLengthLatin1IntoUTF8(input[0..len]); - }, - - .latin1, .ascii, .buffer => { - return len; - }, - - .ucs2, .utf16le => { - return strings.elementLengthUTF8IntoUTF16([]const u8, input[0..len]) * 2; - }, - - .hex => { - return len / 2; - }, - - .base64, .base64url => { - return bun.base64.decodeLen(input[0..len]); - }, - // else => return &[_]u8{}; - } - } - - pub fn encodeIntoFrom16(input: []const u16, to: []u8, comptime encoding: JSC.Node.Encoding, comptime allow_partial_write: bool) !usize { - return writeU16(input.ptr, input.len, to.ptr, to.len, encoding, allow_partial_write); - } - - pub fn encodeIntoFrom8(input: []const u8, to: []u8, comptime encoding: JSC.Node.Encoding) !usize { - return writeU8(input.ptr, input.len, to.ptr, to.len, encoding); - } - - pub fn writeU16(input: [*]const u16, len: usize, to: [*]u8, to_len: usize, comptime encoding: JSC.Node.Encoding, comptime allow_partial_write: bool) !usize { - if (len == 0) - return 0; - - switch (comptime encoding) { - .utf8 => { - return strings.copyUTF16IntoUTF8(to[0..to_len], []const u16, input[0..len], allow_partial_write).written; - }, - .latin1, .ascii, .buffer => { - const out = @min(len, to_len); - strings.copyU16IntoU8(to[0..to_len], input[0..out]); - return out; - }, - // string is already encoded, just need to copy the data - .ucs2, .utf16le => { - if (allow_partial_write) { - const bytes_input_len = len * 2; - const written = @min(bytes_input_len, to_len); - const input_u8 = @as([*]const u8, @ptrCast(input)); - bun.memmove(to[0..written], input_u8[0..written]); - return written; - } else { - const bytes_input_len = len * 2; - const written = @min(bytes_input_len, to_len); - if (written < 2) return 0; - - const fixed_len = (written / 2) * 2; - const input_u8 = @as([*]const u8, @ptrCast(input)); - bun.memmove(to[0..written], input_u8[0..fixed_len]); - return fixed_len; - } - }, - - .hex => { - return strings.decodeHexToBytesTruncate(to[0..to_len], u16, input[0..len]); - }, - - .base64, .base64url => { - if (to_len < 2 or len == 0) - return 0; - - // very very slow case! 
- // shouldn't really happen though - const transcoded = strings.toUTF8Alloc(bun.default_allocator, input[0..len]) catch return 0; - defer bun.default_allocator.free(transcoded); - return writeU8(transcoded.ptr, transcoded.len, to, to_len, encoding); - }, - // else => return &[_]u8{}; - } - } - - pub fn constructFrom(comptime T: type, input: []const T, allocator: std.mem.Allocator, comptime encoding: JSC.Node.Encoding) []u8 { - return switch (comptime T) { - u16 => constructFromU16(input.ptr, input.len, allocator, encoding), - u8 => constructFromU8(input.ptr, input.len, allocator, encoding), - else => @compileError("Unsupported type for constructFrom: " ++ @typeName(T)), - }; - } - - pub fn constructFromU8(input: [*]const u8, len: usize, allocator: std.mem.Allocator, comptime encoding: JSC.Node.Encoding) []u8 { - if (len == 0) return &[_]u8{}; - - switch (comptime encoding) { - .buffer => { - var to = allocator.alloc(u8, len) catch return &[_]u8{}; - @memcpy(to[0..len], input[0..len]); - - return to; - }, - .latin1, .ascii => { - var to = allocator.alloc(u8, len) catch return &[_]u8{}; - - @memcpy(to[0..len], input[0..len]); - - return to; - }, - .utf8 => { - // need to encode - return strings.allocateLatin1IntoUTF8(allocator, []const u8, input[0..len]) catch return &[_]u8{}; - }, - // encode latin1 into UTF16 - // return as bytes - .ucs2, .utf16le => { - var to = allocator.alloc(u16, len) catch return &[_]u8{}; - _ = strings.copyLatin1IntoUTF16([]u16, to, []const u8, input[0..len]); - return std.mem.sliceAsBytes(to[0..len]); - }, - - .hex => { - if (len < 2) - return &[_]u8{}; - - var to = allocator.alloc(u8, len / 2) catch return &[_]u8{}; - return to[0..strings.decodeHexToBytesTruncate(to, u8, input[0..len])]; - }, - - .base64, .base64url => { - const slice = strings.trim(input[0..len], "\r\n\t " ++ [_]u8{std.ascii.control_code.vt}); - if (slice.len == 0) return &[_]u8{}; - - const outlen = bun.base64.decodeLen(slice); - const to = allocator.alloc(u8, outlen) catch return &[_]u8{}; - - const wrote = bun.base64.decode(to[0..outlen], slice).count; - return to[0..wrote]; - }, - } - } - - pub fn constructFromU16(input: [*]const u16, len: usize, allocator: std.mem.Allocator, comptime encoding: JSC.Node.Encoding) []u8 { - if (len == 0) return &[_]u8{}; - - switch (comptime encoding) { - .utf8 => { - return strings.toUTF8AllocWithType(allocator, []const u16, input[0..len]) catch return &[_]u8{}; - }, - .latin1, .buffer, .ascii => { - var to = allocator.alloc(u8, len) catch return &[_]u8{}; - strings.copyU16IntoU8(to[0..len], input[0..len]); - return to; - }, - // string is already encoded, just need to copy the data - .ucs2, .utf16le => { - var to = std.mem.sliceAsBytes(allocator.alloc(u16, len) catch return &[_]u8{}); - const bytes = std.mem.sliceAsBytes(input[0..len]); - @memcpy(to[0..bytes.len], bytes); - return to; - }, - - .hex => { - var to = allocator.alloc(u8, len * 2) catch return &[_]u8{}; - return to[0..strings.decodeHexToBytesTruncate(to, u16, input[0..len])]; - }, - - .base64, .base64url => { - // very very slow case! 
- // shouldn't really happen though - const transcoded = strings.toUTF8Alloc(allocator, input[0..len]) catch return &[_]u8{}; - defer allocator.free(transcoded); - return constructFromU8(transcoded.ptr, transcoded.len, allocator, encoding); - }, - } - } -}; - -comptime { - _ = &TextEncoder.TextEncoder__encode8; - _ = &TextEncoder.TextEncoder__encode16; - _ = &TextEncoder.TextEncoder__encodeInto8; - _ = &TextEncoder.TextEncoder__encodeInto16; - _ = &TextEncoder.TextEncoder__encodeRopeString; -} - -comptime { - _ = &Encoder.Bun__encoding__writeLatin1; - _ = &Encoder.Bun__encoding__writeUTF16; - _ = &Encoder.Bun__encoding__byteLengthLatin1AsUTF8; - _ = &Encoder.Bun__encoding__byteLengthUTF16AsUTF8; - _ = &Encoder.Bun__encoding__toString; - _ = &Encoder.Bun__encoding__toStringUTF8; - _ = &Encoder.Bun__encoding__constructFromLatin1; - _ = &Encoder.Bun__encoding__constructFromUTF16; -} diff --git a/src/bun.js/webcore/fetch.zig b/src/bun.js/webcore/fetch.zig index c0143dd700..9454eae135 100644 --- a/src/bun.js/webcore/fetch.zig +++ b/src/bun.js/webcore/fetch.zig @@ -90,7 +90,7 @@ pub const FetchTasklet = struct { promise: JSC.JSPromise.Strong, concurrent_task: JSC.ConcurrentTask = .{}, poll_ref: Async.KeepAlive = .{}, - memory_reporter: *JSC.MemoryReportingAllocator, + memory_reporter: *bun.MemoryReportingAllocator, /// For Http Client requests /// when Content-Length is provided this represents the whole size of the request /// If chunked encoded this will represent the total received size (ignoring the chunk headers) @@ -119,7 +119,7 @@ pub const FetchTasklet = struct { is_waiting_request_stream_start: bool = false, mutex: Mutex, - tracker: JSC.AsyncTaskTracker, + tracker: JSC.Debugger.AsyncTaskTracker, ref_count: std.atomic.Value(u32) = std.atomic.Value(u32).init(1), @@ -378,9 +378,9 @@ pub const FetchTasklet = struct { return JSValue.jsUndefined(); } comptime { - const jsonResolveRequestStream = JSC.toJSHostFunction(onResolveRequestStream); + const jsonResolveRequestStream = JSC.toJSHostFn(onResolveRequestStream); @export(&jsonResolveRequestStream, .{ .name = "Bun__FetchTasklet__onResolveRequestStream" }); - const jsonRejectRequestStream = JSC.toJSHostFunction(onRejectRequestStream); + const jsonRejectRequestStream = JSC.toJSHostFn(onRejectRequestStream); @export(&jsonRejectRequestStream, .{ .name = "Bun__FetchTasklet__onRejectRequestStream" }); } @@ -1173,7 +1173,7 @@ pub const FetchTasklet = struct { .url_proxy_buffer = fetch_options.url_proxy_buffer, .signal = fetch_options.signal, .hostname = fetch_options.hostname, - .tracker = JSC.AsyncTaskTracker.init(jsc_vm), + .tracker = JSC.Debugger.AsyncTaskTracker.init(jsc_vm), .memory_reporter = fetch_options.memory_reporter, .check_server_identity = fetch_options.check_server_identity, .reject_unauthorized = fetch_options.reject_unauthorized, @@ -1309,7 +1309,7 @@ pub const FetchTasklet = struct { globalThis: ?*JSGlobalObject, // Custom Hostname hostname: ?[]u8 = null, - memory_reporter: *JSC.MemoryReportingAllocator, + memory_reporter: *bun.MemoryReportingAllocator, check_server_identity: JSC.Strong = .empty, unix_socket_path: ZigString.Slice, ssl_config: ?*SSLConfig = null, @@ -1425,7 +1425,7 @@ fn dataURLResponse( var data_url = _data_url; const data = data_url.decodeData(allocator) catch { - const err = JSC.createError(globalThis, "failed to fetch the data URL", .{}); + const err = globalThis.createError("failed to fetch the data URL", .{}); return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); }; var 
blob = Blob.init(data, allocator, globalThis); @@ -1457,7 +1457,7 @@ fn dataURLResponse( } comptime { - const Bun__fetchPreconnect = JSC.toJSHostFunction(Bun__fetchPreconnect_); + const Bun__fetchPreconnect = JSC.toJSHostFn(Bun__fetchPreconnect_); @export(&Bun__fetchPreconnect, .{ .name = "Bun__fetchPreconnect" }); } pub fn Bun__fetchPreconnect_( @@ -1518,7 +1518,7 @@ const StringOrURL = struct { }; comptime { - const Bun__fetch = JSC.toJSHostFunction(Bun__fetch_); + const Bun__fetch = JSC.toJSHostFn(Bun__fetch_); @export(&Bun__fetch, .{ .name = "Bun__fetch" }); } @@ -1533,7 +1533,7 @@ pub fn Bun__fetch_( bun.Analytics.Features.fetch += 1; const vm = JSC.VirtualMachine.get(); - var memory_reporter = bun.default_allocator.create(JSC.MemoryReportingAllocator) catch bun.outOfMemory(); + var memory_reporter = bun.default_allocator.create(bun.MemoryReportingAllocator) catch bun.outOfMemory(); // used to clean up dynamically allocated memory on error (a poor man's errdefer) var is_error = false; var allocator = memory_reporter.wrap(bun.default_allocator); @@ -1545,14 +1545,14 @@ pub fn Bun__fetch_( } if (arguments.len == 0) { - const err = JSC.toTypeError(.MISSING_ARGS, fetch_error_no_args, .{}, ctx); + const err = ctx.toTypeError(.MISSING_ARGS, fetch_error_no_args, .{}); return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); } var headers: ?Headers = null; var method = Method.GET; - var args = JSC.Node.ArgumentsSlice.init(vm, arguments.slice()); + var args = JSC.CallFrame.ArgumentsSlice.init(vm, arguments.slice()); var url = ZigURL{}; var first_arg = args.nextEat().?; @@ -1689,7 +1689,7 @@ pub fn Bun__fetch_( if (url_str.isEmpty()) { is_error = true; - const err = JSC.toTypeError(.INVALID_URL, fetch_error_blank_url, .{}, ctx); + const err = ctx.toTypeError(.INVALID_URL, fetch_error_blank_url, .{}); return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); } @@ -1698,7 +1698,7 @@ pub fn Bun__fetch_( defer url_slice.deinit(); var data_url = DataURL.parseWithoutCheck(url_slice.slice()) catch { - const err = JSC.createError(globalThis, "failed to fetch the data URL", .{}); + const err = ctx.createError("failed to fetch the data URL", .{}); is_error = true; return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); }; @@ -1708,7 +1708,7 @@ pub fn Bun__fetch_( } url = ZigURL.fromString(allocator, url_str) catch { - const err = JSC.toTypeError(.INVALID_URL, "fetch() URL is invalid", .{}, ctx); + const err = ctx.toTypeError(.INVALID_URL, "fetch() URL is invalid", .{}); is_error = true; return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM( globalThis, @@ -1727,7 +1727,7 @@ pub fn Bun__fetch_( defer url_slice.deinit(); var data_url = DataURL.parseWithoutCheck(url_slice.slice()) catch { - const err = JSC.createError(globalThis, "failed to fetch the data URL", .{}); + const err = globalThis.createError("failed to fetch the data URL", .{}); return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); }; data_url.url = url_str; @@ -2007,7 +2007,7 @@ pub fn Bun__fetch_( if (proxy_arg.isString() and proxy_arg.getLength(ctx) > 0) { var href = try JSC.URL.hrefFromJS(proxy_arg, globalThis); if (href.tag == .Dead) { - const err = JSC.toTypeError(.INVALID_ARG_VALUE, "fetch() proxy URL is invalid", .{}, ctx); + const err = ctx.toTypeError(.INVALID_ARG_VALUE, "fetch() proxy URL is invalid", .{}); is_error = true; return 
JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); } @@ -2140,14 +2140,14 @@ pub fn Bun__fetch_( // headers: Headers | undefined; headers = extract_headers: { - var fetch_headers_to_deref: ?*JSC.FetchHeaders = null; + var fetch_headers_to_deref: ?*bun.webcore.FetchHeaders = null; defer { if (fetch_headers_to_deref) |fetch_headers| { fetch_headers.deref(); } } - const fetch_headers: ?*JSC.FetchHeaders = brk: { + const fetch_headers: ?*bun.webcore.FetchHeaders = brk: { if (options_object) |options| { if (options.fastGet(globalThis, .headers)) |headers_value| { if (!headers_value.isUndefined()) { @@ -2217,7 +2217,7 @@ pub fn Bun__fetch_( } if (fetch_headers) |headers_| { - if (headers_.fastGet(JSC.FetchHeaders.HTTPHeaderName.Host)) |_hostname| { + if (headers_.fastGet(bun.webcore.FetchHeaders.HTTPHeaderName.Host)) |_hostname| { if (hostname) |host| { hostname = null; allocator.free(host); @@ -2225,7 +2225,7 @@ pub fn Bun__fetch_( hostname = _hostname.toOwnedSliceZ(allocator) catch bun.outOfMemory(); } if (url.isS3()) { - if (headers_.fastGet(JSC.FetchHeaders.HTTPHeaderName.Range)) |_range| { + if (headers_.fastGet(bun.webcore.FetchHeaders.HTTPHeaderName.Range)) |_range| { if (range) |range_| { range = null; allocator.free(range_); @@ -2247,7 +2247,7 @@ pub fn Bun__fetch_( if (proxy != null and unix_socket_path.length() > 0) { is_error = true; - const err = JSC.toTypeError(.INVALID_ARG_VALUE, fetch_error_proxy_unix, .{}, ctx); + const err = ctx.toTypeError(.INVALID_ARG_VALUE, fetch_error_proxy_unix, .{}); return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); } @@ -2288,9 +2288,9 @@ pub fn Bun__fetch_( break :blob blob; } else { // Consistent with what Node.js does - it rejects, not a 404. 
- const err = JSC.toTypeError(.INVALID_ARG_VALUE, "Failed to resolve blob:{s}", .{ + const err = globalThis.toTypeError(.INVALID_ARG_VALUE, "Failed to resolve blob:{s}", .{ url_path_decoded, - }, ctx); + }); is_error = true; return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); } @@ -2363,14 +2363,14 @@ pub fn Bun__fetch_( if (url.protocol.len > 0) { if (!(url.isHTTP() or url.isHTTPS() or url.isS3())) { - const err = JSC.toTypeError(.INVALID_ARG_VALUE, "protocol must be http:, https: or s3:", .{}, ctx); + const err = globalThis.toTypeError(.INVALID_ARG_VALUE, "protocol must be http:, https: or s3:", .{}); is_error = true; return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); } } if (!method.hasRequestBody() and body.hasBody()) { - const err = JSC.toTypeError(.INVALID_ARG_VALUE, fetch_error_unexpected_body, .{}, ctx); + const err = globalThis.toTypeError(.INVALID_ARG_VALUE, fetch_error_unexpected_body, .{}); is_error = true; return JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); } @@ -2463,7 +2463,7 @@ pub fn Bun__fetch_( } // TODO: make this async + lazy - const res = JSC.Node.NodeFS.readFile( + const res = JSC.Node.fs.NodeFS.readFile( globalThis.bunVM().nodeFS(), .{ .encoding = .buffer, @@ -2731,6 +2731,7 @@ fn setHeaders(headers: *?Headers, new_headers: []const picohttp.Header, allocato headers_.deinit(); } } + const std = @import("std"); const bun = @import("bun"); const JSC = bun.JSC; @@ -2750,7 +2751,7 @@ const FetchRedirect = http.FetchRedirect; const Blob = JSC.WebCore.Blob; const Response = JSC.WebCore.Response; const Request = JSC.WebCore.Request; -const Headers = JSC.WebCore.Headers; +const Headers = bun.http.Headers; const Method = @import("../../http/method.zig").Method; const Body = JSC.WebCore.Body; const Async = bun.Async; @@ -2758,9 +2759,9 @@ const SSLConfig = @import("../api/server.zig").ServerConfig.SSLConfig; const Mutex = bun.Mutex; const BoringSSL = bun.BoringSSL.c; const X509 = @import("../api/bun/x509.zig"); -const FetchHeaders = JSC.FetchHeaders; +const FetchHeaders = bun.webcore.FetchHeaders; const Environment = bun.Environment; const PosixToWinNormalizer = bun.path.PosixToWinNormalizer; -const AnyBlob = JSC.WebCore.AnyBlob; +const AnyBlob = JSC.WebCore.Blob.Any; const s3 = bun.S3; const picohttp = bun.picohttp; diff --git a/src/bun.js/webcore/prompt.zig b/src/bun.js/webcore/prompt.zig new file mode 100644 index 0000000000..c1f2939b8b --- /dev/null +++ b/src/bun.js/webcore/prompt.zig @@ -0,0 +1,350 @@ +//! Implements prompt, alert, and confirm Web API +comptime { + const js_alert = JSC.toJSHostFn(alert); + @export(&js_alert, .{ .name = "WebCore__alert" }); + const js_prompt = JSC.toJSHostFn(prompt.call); + @export(&js_prompt, .{ .name = "WebCore__prompt" }); + const js_confirm = JSC.toJSHostFn(confirm); + @export(&js_confirm, .{ .name = "WebCore__confirm" }); +} + +/// https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#dom-alert +fn alert(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const arguments = callframe.arguments_old(1).slice(); + var output = bun.Output.writer(); + const has_message = arguments.len != 0; + + // 2. If the method was invoked with no arguments, then let message be the empty string; otherwise, let message be the method's first argument. 
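All three dialog implementations in this new file read the message argument through a stack-fallback allocator so that typical short prompts never touch the heap. A minimal std-only sketch of that pattern follows; the function name, buffer size, and fallback allocator are illustrative assumptions and not part of the patch:

    const std = @import("std");

    // Illustration of the stack-fallback pattern used by alert(), confirm() and
    // prompt(): short strings are served from a 2048-byte stack buffer, and only
    // larger ones fall back to the backing allocator.
    fn echoMessage(message: []const u8) !void {
        var state = std.heap.stackFallback(2048, std.heap.page_allocator);
        const allocator = state.get();

        const copy = try allocator.dupe(u8, message);
        defer allocator.free(copy);

        std.debug.print("{s}\n", .{copy});
    }

    test "echoMessage" {
        try echoMessage("hello [Enter] ");
    }
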
+ if (has_message) { + var state = std.heap.stackFallback(2048, bun.default_allocator); + const allocator = state.get(); + const message = try arguments[0].toSlice(globalObject, allocator); + defer message.deinit(); + + if (message.len > 0) { + // 3. Set message to the result of normalizing newlines given message. + // * We skip step 3 because they are already done in most terminals by default. + + // 4. Set message to the result of optionally truncating message. + // * We just don't do this because it's not necessary. + + // 5. Show message to the user, treating U+000A LF as a line break. + output.writeAll(message.slice()) catch { + // 1. If we cannot show simple dialogs for this, then return. + return .undefined; + }; + } + } + + output.writeAll(if (has_message) " [Enter] " else "Alert [Enter] ") catch { + // 1. If we cannot show simple dialogs for this, then return. + return .undefined; + }; + + // 6. Invoke WebDriver BiDi user prompt opened with this, "alert", and message. + // * Not pertinent to use their complex system in a server context. + bun.Output.flush(); + + // 7. Optionally, pause while waiting for the user to acknowledge the message. + var stdin = std.io.getStdIn(); + var reader = stdin.reader(); + while (true) { + const byte = reader.readByte() catch break; + if (byte == '\n') break; + } + + // 8. Invoke WebDriver BiDi user prompt closed with this and true. + // * Again, not necessary in a server context. + + return .undefined; +} + +fn confirm(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { + const arguments = callframe.arguments_old(1).slice(); + var output = bun.Output.writer(); + const has_message = arguments.len != 0; + + if (has_message) { + var state = std.heap.stackFallback(1024, bun.default_allocator); + const allocator = state.get(); + // 2. Set message to the result of normalizing newlines given message. + // * Not pertinent to a server runtime so we will just let the terminal handle this. + + // 3. Set message to the result of optionally truncating message. + // * Not necessary so we won't do it. + const message = try arguments[0].toSlice(globalObject, allocator); + defer message.deinit(); + + output.writeAll(message.slice()) catch { + // 1. If we cannot show simple dialogs for this, then return false. + return .false; + }; + } + + // 4. Show message to the user, treating U+000A LF as a line break, + // and ask the user to respond with a positive or negative + // response. + output.writeAll(if (has_message) " [y/N] " else "Confirm [y/N] ") catch { + // 1. If we cannot show simple dialogs for this, then return false. + return .false; + }; + + // 5. Invoke WebDriver BiDi user prompt opened with this, "confirm", and message. + // * Not relevant in a server context. + bun.Output.flush(); + + // 6. Pause until the user responds either positively or negatively. + var stdin = std.io.getStdIn(); + const unbuffered_reader = stdin.reader(); + var buffered = std.io.bufferedReader(unbuffered_reader); + var reader = buffered.reader(); + + const first_byte = reader.readByte() catch { + return .false; + }; + + // 7. Invoke WebDriver BiDi user prompt closed with this, and true if + // the user responded positively or false otherwise. + // * Not relevant in a server context. + + switch (first_byte) { + '\n' => return .false, + '\r' => { + const next_byte = reader.readByte() catch { + // They may have said yes, but the stdin is invalid. 
+ return .false; + }; + if (next_byte == '\n') { + return .false; + } + }, + 'y', 'Y' => { + const next_byte = reader.readByte() catch { + // They may have said yes, but the stdin is invalid. + + return .false; + }; + + if (next_byte == '\n') { + // 8. If the user responded positively, return true; + // otherwise, the user responded negatively: return false. + return .true; + } else if (next_byte == '\r') { + //Check Windows style + const second_byte = reader.readByte() catch { + return .false; + }; + if (second_byte == '\n') { + return .true; + } + } + }, + else => {}, + } + + while (reader.readByte()) |b| { + if (b == '\n' or b == '\r') break; + } else |_| {} + + // 8. If the user responded positively, return true; otherwise, the user + // responded negatively: return false. + return .false; +} + +pub const prompt = struct { + /// Adapted from `std.io.Reader.readUntilDelimiterArrayList` to only append + /// and assume capacity. + pub fn readUntilDelimiterArrayListAppendAssumeCapacity( + reader: anytype, + array_list: *std.ArrayList(u8), + delimiter: u8, + max_size: usize, + ) !void { + while (true) { + if (array_list.items.len == max_size) { + return error.StreamTooLong; + } + + const byte: u8 = try reader.readByte(); + + if (byte == delimiter) { + return; + } + + array_list.appendAssumeCapacity(byte); + } + } + + /// Adapted from `std.io.Reader.readUntilDelimiterArrayList` to always append + /// and not resize. + fn readUntilDelimiterArrayListInfinity( + reader: anytype, + array_list: *std.ArrayList(u8), + delimiter: u8, + ) !void { + while (true) { + const byte: u8 = try reader.readByte(); + + if (byte == delimiter) { + return; + } + + try array_list.append(byte); + } + } + + /// https://html.spec.whatwg.org/multipage/timers-and-user-prompts.html#dom-prompt + pub fn call( + globalObject: *JSC.JSGlobalObject, + callframe: *JSC.CallFrame, + ) bun.JSError!JSC.JSValue { + const arguments = callframe.arguments_old(3).slice(); + var state = std.heap.stackFallback(2048, bun.default_allocator); + const allocator = state.get(); + var output = bun.Output.writer(); + const has_message = arguments.len != 0; + const has_default = arguments.len >= 2; + // 4. Set default to the result of optionally truncating default. + // * We don't really need to do this. + const default = if (has_default) arguments[1] else .null; + + if (has_message) { + // 2. Set message to the result of normalizing newlines given message. + // * Not pertinent to a server runtime so we will just let the terminal handle this. + + // 3. Set message to the result of optionally truncating message. + // * Not necessary so we won't do it. + const message = try arguments[0].toSlice(globalObject, allocator); + defer message.deinit(); + + output.writeAll(message.slice()) catch { + // 1. If we cannot show simple dialogs for this, then return null. + return .null; + }; + } + + // 4. Set default to the result of optionally truncating default. + + // 5. Show message to the user, treating U+000A LF as a line break, + // and ask the user to either respond with a string value or + // abort. The response must be defaulted to the value given by + // default. + output.writeAll(if (has_message) " " else "Prompt ") catch { + // 1. If we cannot show simple dialogs for this, then return false. + return .false; + }; + + if (has_default) { + const default_string = try arguments[1].toSlice(globalObject, allocator); + defer default_string.deinit(); + + output.print("[{s}] ", .{default_string.slice()}) catch { + // 1. 
If we cannot show simple dialogs for this, then return false. + return .false; + }; + } + + // 6. Invoke WebDriver BiDi user prompt opened with this, "prompt" and message. + // * Not relevant in a server context. + bun.Output.flush(); + + // unset `ENABLE_VIRTUAL_TERMINAL_INPUT` on windows. This prevents backspace from + // deleting the entire line + const original_mode: if (Environment.isWindows) ?bun.windows.DWORD else void = if (comptime Environment.isWindows) + bun.windows.updateStdioModeFlags(.std_in, .{ .unset = c.ENABLE_VIRTUAL_TERMINAL_INPUT }) catch null; + + defer if (comptime Environment.isWindows) { + if (original_mode) |mode| { + _ = bun.c.SetConsoleMode(bun.FD.stdin().native(), mode); + } + }; + + // 7. Pause while waiting for the user's response. + const reader = bun.Output.buffered_stdin.reader(); + var second_byte: ?u8 = null; + const first_byte = reader.readByte() catch { + // 8. Let result be null if the user aborts, or otherwise the string + // that the user responded with. + return .null; + }; + + if (first_byte == '\n') { + // 8. Let result be null if the user aborts, or otherwise the string + // that the user responded with. + return default; + } else if (first_byte == '\r') { + const second = reader.readByte() catch return .null; + second_byte = second; + if (second == '\n') return default; + } + + var input = std.ArrayList(u8).initCapacity(allocator, 2048) catch { + // 8. Let result be null if the user aborts, or otherwise the string + // that the user responded with. + return .null; + }; + defer input.deinit(); + + input.appendAssumeCapacity(first_byte); + if (second_byte) |second| input.appendAssumeCapacity(second); + + // All of this code basically just first tries to load the input into a + // buffer of size 2048. If that is too small, then increase the buffer + // size to 4096. If that is too small, then just dynamically allocate + // the rest. + readUntilDelimiterArrayListAppendAssumeCapacity(reader, &input, '\n', 2048) catch |e| { + if (e != error.StreamTooLong) { + // 8. Let result be null if the user aborts, or otherwise the string + // that the user responded with. + return .null; + } + + input.ensureTotalCapacity(4096) catch { + // 8. Let result be null if the user aborts, or otherwise the string + // that the user responded with. + return .null; + }; + + readUntilDelimiterArrayListAppendAssumeCapacity(reader, &input, '\n', 4096) catch |e2| { + if (e2 != error.StreamTooLong) { + // 8. Let result be null if the user aborts, or otherwise the string + // that the user responded with. + return .null; + } + + readUntilDelimiterArrayListInfinity(reader, &input, '\n') catch { + // 8. Let result be null if the user aborts, or otherwise the string + // that the user responded with. + return .null; + }; + }; + }; + + if (input.items.len > 0 and input.items[input.items.len - 1] == '\r') { + input.items.len -= 1; + } + + if (comptime Environment.allow_assert) { + bun.assert(input.items.len > 0); + bun.assert(input.items[input.items.len - 1] != '\r'); + } + + // 8. Let result be null if the user aborts, or otherwise the string + // that the user responded with. + var result = JSC.ZigString.init(input.items); + result.markUTF8(); + + // 9. Invoke WebDriver BiDi user prompt closed with this, false if + // result is null or true otherwise, and result. + // * Too complex for server context. + + // 9. Return result. 
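The input-reading strategy spelled out in the comments above (fill a pre-reserved 2048-byte buffer, widen once to 4096, then grow freely) can be summarized with the following std-only sketch; the helper name and the test input are illustrative assumptions, not part of the patch:

    const std = @import("std");

    fn readLineTiered(reader: anytype, allocator: std.mem.Allocator) ![]u8 {
        var input = try std.ArrayList(u8).initCapacity(allocator, 2048);
        errdefer input.deinit();

        while (true) {
            const byte = reader.readByte() catch break; // treat EOF / read errors as end of input
            if (byte == '\n') break;

            if (input.items.len < input.capacity) {
                // Phase 1: stay within the pre-reserved capacity.
                input.appendAssumeCapacity(byte);
            } else if (input.capacity < 4096) {
                // Phase 2: widen the reservation once before growing freely.
                try input.ensureTotalCapacity(4096);
                input.appendAssumeCapacity(byte);
            } else {
                // Phase 3: fall back to ordinary dynamic growth.
                try input.append(byte);
            }
        }

        // Strip a trailing '\r' from a Windows-style line ending, as prompt() does.
        if (input.items.len > 0 and input.items[input.items.len - 1] == '\r') {
            input.items.len -= 1;
        }
        return input.toOwnedSlice();
    }

    test "readLineTiered stops at LF and strips the CR" {
        var stream = std.io.fixedBufferStream("hello\r\nworld");
        const line = try readLineTiered(stream.reader(), std.testing.allocator);
        defer std.testing.allocator.free(line);
        try std.testing.expectEqualStrings("hello", line);
    }
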
+ return result.toJS(globalObject); + } +}; + +const std = @import("std"); +const bun = @import("bun"); +const c = bun.c; +const Environment = bun.Environment; +const JSC = bun.jsc; diff --git a/src/bun.js/webcore/request.zig b/src/bun.js/webcore/request.zig deleted file mode 100644 index a609761f4d..0000000000 --- a/src/bun.js/webcore/request.zig +++ /dev/null @@ -1,984 +0,0 @@ -const std = @import("std"); -const Api = @import("../../api/schema.zig").Api; -const bun = @import("bun"); -const MimeType = bun.http.MimeType; -const ZigURL = @import("../../url.zig").URL; -const HTTPClient = bun.http; -const JSC = bun.JSC; - -const Method = @import("../../http/method.zig").Method; -const FetchHeaders = JSC.FetchHeaders; -const AbortSignal = JSC.WebCore.AbortSignal; -const ObjectPool = @import("../../pool.zig").ObjectPool; -const SystemError = JSC.SystemError; -const Output = bun.Output; -const MutableString = bun.MutableString; -const strings = bun.strings; -const string = bun.string; -const default_allocator = bun.default_allocator; -const FeatureFlags = bun.FeatureFlags; -const ArrayBuffer = @import("../base.zig").ArrayBuffer; -const Properties = @import("../base.zig").Properties; - -const castObj = @import("../base.zig").castObj; -const getAllocator = @import("../base.zig").getAllocator; - -const Environment = @import("../../env.zig"); -const ZigString = JSC.ZigString; -const IdentityContext = @import("../../identity_context.zig").IdentityContext; -const JSPromise = JSC.JSPromise; -const JSValue = JSC.JSValue; -const JSGlobalObject = JSC.JSGlobalObject; -const NullableAllocator = bun.NullableAllocator; - -const VirtualMachine = JSC.VirtualMachine; -const Task = JSC.Task; -const JSPrinter = bun.js_printer; -const picohttp = bun.picohttp; -const StringJoiner = bun.StringJoiner; -const uws = bun.uws; - -const InlineBlob = JSC.WebCore.InlineBlob; -const AnyBlob = JSC.WebCore.AnyBlob; -const InternalBlob = JSC.WebCore.InternalBlob; -const BodyMixin = JSC.WebCore.BodyMixin; -const Body = JSC.WebCore.Body; -const Blob = JSC.WebCore.Blob; -const Response = JSC.WebCore.Response; - -// https://developer.mozilla.org/en-US/docs/Web/API/Request -pub const Request = struct { - url: bun.String = bun.String.empty, - // NOTE(@cirospaciari): renamed to _headers to avoid direct manipulation, use getFetchHeaders, setFetchHeaders, ensureFetchHeaders and hasFetchHeaders instead - _headers: ?*FetchHeaders = null, - signal: ?*AbortSignal = null, - body: *JSC.BodyValueRef, - method: Method = Method.GET, - request_context: JSC.API.AnyRequestContext = JSC.API.AnyRequestContext.Null, - https: bool = false, - weak_ptr_data: WeakRef.Data = .empty, - // We must report a consistent value for this - reported_estimated_size: usize = 0, - internal_event_callback: InternalJSEventCallback = .{}, - - const RequestMixin = BodyMixin(@This()); - pub const js = JSC.Codegen.JSRequest; - // NOTE: toJS is overridden - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - pub const new = bun.TrivialNew(@This()); - - pub const getText = RequestMixin.getText; - pub const getBytes = RequestMixin.getBytes; - pub const getBody = RequestMixin.getBody; - pub const getBodyUsed = RequestMixin.getBodyUsed; - pub const getJSON = RequestMixin.getJSON; - pub const getArrayBuffer = RequestMixin.getArrayBuffer; - pub const getBlob = RequestMixin.getBlob; - pub const getFormData = RequestMixin.getFormData; - pub const getBlobWithoutCallFrame = RequestMixin.getBlobWithoutCallFrame; - pub const WeakRef = bun.ptr.WeakPtr(Request, 
"weak_ptr_data"); - - pub fn memoryCost(this: *const Request) usize { - return @sizeOf(Request) + this.request_context.memoryCost() + this.url.byteSlice().len + this.body.value.memoryCost(); - } - - pub export fn Request__setCookiesOnRequestContext(this: *Request, cookieMap: ?*JSC.WebCore.CookieMap) void { - this.request_context.setCookies(cookieMap); - } - - pub export fn Request__getUWSRequest( - this: *Request, - ) ?*uws.Request { - return this.request_context.getRequest(); - } - - pub export fn Request__setInternalEventCallback( - this: *Request, - callback: JSC.JSValue, - globalThis: *JSC.JSGlobalObject, - ) void { - this.internal_event_callback = InternalJSEventCallback.init(callback, globalThis); - // we always have the abort event but we need to enable the timeout event as well in case of `node:http`.Server.setTimeout is set - this.request_context.enableTimeoutEvents(); - } - - pub export fn Request__setTimeout(this: *Request, seconds: JSC.JSValue, globalThis: *JSC.JSGlobalObject) void { - if (!seconds.isNumber()) { - globalThis.throw("Failed to set timeout: The provided value is not of type 'number'.", .{}) catch {}; - return; - } - - this.setTimeout(seconds.to(c_uint)); - } - - comptime { - _ = Request__getUWSRequest; - _ = Request__setInternalEventCallback; - _ = Request__setTimeout; - } - - pub const InternalJSEventCallback = struct { - function: JSC.Strong = .empty, - - pub const EventType = JSC.API.NodeHTTPResponse.AbortEvent; - - pub fn init(function: JSC.JSValue, globalThis: *JSC.JSGlobalObject) InternalJSEventCallback { - return InternalJSEventCallback{ - .function = JSC.Strong.create(function, globalThis), - }; - } - - pub fn hasCallback(this: *InternalJSEventCallback) bool { - return this.function.has(); - } - - pub fn trigger(this: *InternalJSEventCallback, eventType: EventType, globalThis: *JSC.JSGlobalObject) bool { - if (this.function.get()) |callback| { - _ = callback.call(globalThis, JSC.JSValue.jsUndefined(), &.{JSC.JSValue.jsNumber( - @intFromEnum(eventType), - )}) catch |err| globalThis.reportActiveExceptionAsUnhandled(err); - return true; - } - return false; - } - - pub fn deinit(this: *InternalJSEventCallback) void { - this.function.deinit(); - } - }; - - pub fn init( - url: bun.String, - headers: ?*FetchHeaders, - body: *JSC.BodyValueRef, - method: Method, - ) Request { - return Request{ - .url = url, - ._headers = headers, - .body = body, - .method = method, - }; - } - - pub fn getContentType( - this: *Request, - ) ?ZigString.Slice { - if (this.request_context.getRequest()) |req| { - if (req.header("content-type")) |value| { - return ZigString.Slice.fromUTF8NeverFree(value); - } - } - - if (this._headers) |headers| { - if (headers.fastGet(.ContentType)) |value| { - return value.toSlice(bun.default_allocator); - } - } - - if (this.body.value == .Blob) { - if (this.body.value.Blob.content_type.len > 0) - return ZigString.Slice.fromUTF8NeverFree(this.body.value.Blob.content_type); - } - - return null; - } - - pub fn getFormDataEncoding(this: *Request) ?*bun.FormData.AsyncFormData { - var content_type_slice: ZigString.Slice = this.getContentType() orelse return null; - defer content_type_slice.deinit(); - const encoding = bun.FormData.Encoding.get(content_type_slice.slice()) orelse return null; - return bun.FormData.AsyncFormData.init(bun.default_allocator, encoding) catch unreachable; - } - - pub fn estimatedSize(this: *Request) callconv(.C) usize { - return this.reported_estimated_size; - } - - pub fn getRemoteSocketInfo(this: *Request, globalObject: 
*JSC.JSGlobalObject) ?JSC.JSValue { - if (this.request_context.getRemoteSocketInfo()) |info| { - return JSC.JSSocketAddress.create(globalObject, info.ip, info.port, info.is_ipv6); - } - - return null; - } - - pub fn calculateEstimatedByteSize(this: *Request) void { - this.reported_estimated_size = this.body.value.estimatedSize() + this.sizeOfURL() + @sizeOf(Request); - } - - pub export fn Bun__JSRequest__calculateEstimatedByteSize(this: *Request) void { - this.calculateEstimatedByteSize(); - } - - pub fn toJS(this: *Request, globalObject: *JSGlobalObject) JSValue { - this.calculateEstimatedByteSize(); - return js.toJSUnchecked(globalObject, this); - } - - extern "JS" fn Bun__getParamsIfBunRequest(this_value: JSValue) JSValue; - - pub fn writeFormat(this: *Request, this_value: JSValue, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { - const Writer = @TypeOf(writer); - - const params_object = Bun__getParamsIfBunRequest(this_value); - - const class_label = switch (params_object) { - .zero => "Request", - else => "BunRequest", - }; - try writer.print("{s} ({}) {{\n", .{ class_label, bun.fmt.size(this.body.value.size(), .{}) }); - { - formatter.indent += 1; - defer formatter.indent -|= 1; - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("method: \"", enable_ansi_colors)); - - try writer.writeAll(bun.asByteSlice(@tagName(this.method))); - try writer.writeAll("\""); - formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable; - try writer.writeAll("\n"); - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("url: ", enable_ansi_colors)); - try this.ensureURL(); - try writer.print(comptime Output.prettyFmt("\"{}\"", enable_ansi_colors), .{this.url}); - formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable; - try writer.writeAll("\n"); - - if (params_object.isCell()) { - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("params: ", enable_ansi_colors)); - try formatter.printAs(.Private, Writer, writer, params_object, .Object, enable_ansi_colors); - formatter.printComma(Writer, writer, enable_ansi_colors) catch unreachable; - try writer.writeAll("\n"); - } - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("headers: ", enable_ansi_colors)); - try formatter.printAs(.Private, Writer, writer, this.getHeaders(formatter.globalThis), .DOMWrapper, enable_ansi_colors); - - if (this.body.value == .Blob) { - try writer.writeAll("\n"); - try formatter.writeIndent(Writer, writer); - try this.body.value.Blob.writeFormat(Formatter, formatter, writer, enable_ansi_colors); - } else if (this.body.value == .InternalBlob or this.body.value == .WTFStringImpl) { - try writer.writeAll("\n"); - try formatter.writeIndent(Writer, writer); - const size = this.body.value.size(); - if (size == 0) { - var empty = Blob.initEmpty(undefined); - try empty.writeFormat(Formatter, formatter, writer, enable_ansi_colors); - } else { - try Blob.writeFormatForSize(false, size, writer, enable_ansi_colors); - } - } else if (this.body.value == .Locked) { - if (this.body.value.Locked.readable.get(this.body.value.Locked.global)) |stream| { - try writer.writeAll("\n"); - try formatter.writeIndent(Writer, writer); - try formatter.printAs(.Object, Writer, writer, stream.value, stream.value.jsType(), enable_ansi_colors); - } - } - } - try writer.writeAll("\n"); - try 
formatter.writeIndent(Writer, writer); - try writer.writeAll("}"); - } - - pub fn mimeType(this: *const Request) string { - if (this._headers) |headers| { - if (headers.fastGet(.ContentType)) |content_type| { - return content_type.slice(); - } - } - - switch (this.body.value) { - .Blob => |blob| { - if (blob.content_type.len > 0) { - return blob.content_type; - } - - return MimeType.other.value; - }, - .InternalBlob => return this.body.value.InternalBlob.contentType(), - .WTFStringImpl => return MimeType.text.value, - // .InlineBlob => return this.body.value.InlineBlob.contentType(), - .Null, .Error, .Used, .Locked, .Empty => return MimeType.other.value, - } - } - - pub fn getCache( - _: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return ZigString.init(Properties.UTF8.default).toJS(globalThis); - } - pub fn getCredentials( - _: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return ZigString.init(Properties.UTF8.include).toJS(globalThis); - } - pub fn getDestination( - _: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return ZigString.init("").toJS(globalThis); - } - - pub fn getIntegrity( - _: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return ZigString.Empty.toJS(globalThis); - } - - pub fn getSignal(this: *Request, globalThis: *JSC.JSGlobalObject) JSC.JSValue { - // Already have an C++ instance - if (this.signal) |signal| { - return signal.toJS(globalThis); - } else { - //Lazy create default signal - const js_signal = AbortSignal.create(globalThis); - js_signal.ensureStillAlive(); - if (AbortSignal.fromJS(js_signal)) |signal| { - this.signal = signal.ref(); - } - return js_signal; - } - } - - pub fn getMethod( - this: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return this.method.toJS(globalThis); - } - - pub fn getMode( - _: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return ZigString.init(Properties.UTF8.navigate).toJS(globalThis); - } - - pub fn finalizeWithoutDeinit(this: *Request) void { - if (this._headers) |headers| { - headers.deref(); - this._headers = null; - } - - this.url.deref(); - this.url = bun.String.empty; - - if (this.signal) |signal| { - signal.unref(); - this.signal = null; - } - this.internal_event_callback.deinit(); - } - - pub fn finalize(this: *Request) void { - this.finalizeWithoutDeinit(); - _ = this.body.unref(); - if (this.weak_ptr_data.onFinalize()) { - bun.destroy(this); - } - } - - pub fn getRedirect( - _: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return ZigString.init(Properties.UTF8.follow).toJS(globalThis); - } - pub fn getReferrer( - this: *Request, - globalObject: *JSC.JSGlobalObject, - ) JSC.JSValue { - if (this._headers) |headers_ref| { - if (headers_ref.get("referrer", globalObject)) |referrer| { - return ZigString.init(referrer).toJS(globalObject); - } - } - - return ZigString.init("").toJS(globalObject); - } - pub fn getReferrerPolicy( - _: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return ZigString.init("").toJS(globalThis); - } - pub fn getUrl(this: *Request, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - this.ensureURL() catch { - globalObject.throw("Failed to join URL", .{}) catch {}; // TODO: propagate - return .zero; - }; - - return this.url.toJS(globalObject); - } - - pub fn sizeOfURL(this: *const Request) usize { - if (this.url.length() > 0) - return this.url.byteSlice().len; - - if (this.request_context.getRequest()) |req| { - const req_url = req.url(); - if (req_url.len > 
0 and req_url[0] == '/') { - if (req.header("host")) |host| { - const fmt = bun.fmt.HostFormatter{ - .is_https = this.https, - .host = host, - }; - return this.getProtocol().len + req_url.len + std.fmt.count("{any}", .{fmt}); - } - } - return req_url.len; - } - - return 0; - } - - pub fn getProtocol(this: *const Request) []const u8 { - if (this.https) - return "https://"; - - return "http://"; - } - - pub fn ensureURL(this: *Request) !void { - if (!this.url.isEmpty()) return; - - if (this.request_context.getRequest()) |req| { - const req_url = req.url(); - if (req_url.len > 0 and req_url[0] == '/') { - if (req.header("host")) |host| { - const fmt = bun.fmt.HostFormatter{ - .is_https = this.https, - .host = host, - }; - const url_bytelength = std.fmt.count("{s}{any}{s}", .{ - this.getProtocol(), - fmt, - req_url, - }); - - if (comptime Environment.allow_assert) { - bun.assert(this.sizeOfURL() == url_bytelength); - } - - if (url_bytelength < 128) { - var buffer: [128]u8 = undefined; - const url = std.fmt.bufPrint(&buffer, "{s}{any}{s}", .{ - this.getProtocol(), - fmt, - req_url, - }) catch @panic("Unexpected error while printing URL"); - - if (comptime Environment.allow_assert) { - bun.assert(this.sizeOfURL() == url.len); - } - - var href = bun.JSC.URL.hrefFromString(bun.String.fromBytes(url)); - if (!href.isEmpty()) { - if (href.byteSlice().ptr == url.ptr) { - this.url = bun.String.createLatin1(url[0..href.length()]); - href.deref(); - } else { - this.url = href; - } - } else { - // TODO: what is the right thing to do for invalid URLS? - this.url = bun.String.createUTF8(url); - } - - return; - } - - if (strings.isAllASCII(host) and strings.isAllASCII(req_url)) { - this.url, const bytes = bun.String.createUninitialized(.latin1, url_bytelength); - _ = std.fmt.bufPrint(bytes, "{s}{any}{s}", .{ - this.getProtocol(), - fmt, - req_url, - }) catch |err| switch (err) { - error.NoSpaceLeft => unreachable, // exact space should have been counted - }; - } else { - // slow path - const temp_url = std.fmt.allocPrint(bun.default_allocator, "{s}{any}{s}", .{ - this.getProtocol(), - fmt, - req_url, - }) catch bun.outOfMemory(); - defer bun.default_allocator.free(temp_url); - this.url = bun.String.createUTF8(temp_url); - } - - const href = bun.JSC.URL.hrefFromString(this.url); - // TODO: what is the right thing to do for invalid URLS? 
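ensureURL() counts the exact formatted length first, so URLs shorter than 128 bytes can be printed into a stack buffer while longer ones go straight to the allocator. A rough std-only illustration of that count-then-print pattern is below; the function name and the dupe-based fast path are assumptions for the sketch, not Bun API:

    const std = @import("std");

    fn buildUrl(allocator: std.mem.Allocator, protocol: []const u8, host: []const u8, path: []const u8) ![]u8 {
        // Count the exact output length up front, as Request.ensureURL() does.
        const needed = std.fmt.count("{s}{s}{s}", .{ protocol, host, path });

        if (needed < 128) {
            var buffer: [128]u8 = undefined;
            // Exact space was counted above, so NoSpaceLeft cannot happen.
            const url = std.fmt.bufPrint(&buffer, "{s}{s}{s}", .{ protocol, host, path }) catch unreachable;
            return allocator.dupe(u8, url);
        }

        // Slow path: longer URLs are formatted directly onto the heap.
        return std.fmt.allocPrint(allocator, "{s}{s}{s}", .{ protocol, host, path });
    }

    test "buildUrl" {
        const url = try buildUrl(std.testing.allocator, "http://", "example.com", "/index.html");
        defer std.testing.allocator.free(url);
        try std.testing.expectEqualStrings("http://example.com/index.html", url);
    }
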
- if (!href.isEmpty()) { - this.url.deref(); - this.url = href; - } - - return; - } - } - - if (comptime Environment.allow_assert) { - bun.assert(this.sizeOfURL() == req_url.len); - } - this.url = bun.String.createUTF8(req_url); - } - } - - const Fields = enum { - method, - headers, - body, - // referrer, - // referrerPolicy, - // mode, - // credentials, - // redirect, - // integrity, - // keepalive, - signal, - // proxy, - // timeout, - url, - }; - - pub fn constructInto(globalThis: *JSC.JSGlobalObject, arguments: []const JSC.JSValue) bun.JSError!Request { - var success = false; - const vm = globalThis.bunVM(); - const body = try vm.initRequestBodyValue(.{ .Null = {} }); - var req = Request{ - .body = body, - }; - defer { - if (!success) { - req.finalizeWithoutDeinit(); - _ = req.body.unref(); - } - if (req.body != body) { - _ = body.unref(); - } - } - - if (arguments.len == 0) { - return globalThis.throw("Failed to construct 'Request': 1 argument required, but only 0 present.", .{}); - } else if (arguments[0].isEmptyOrUndefinedOrNull() or !arguments[0].isCell()) { - return globalThis.throw("Failed to construct 'Request': expected non-empty string or object, got undefined", .{}); - } - - const url_or_object = arguments[0]; - const url_or_object_type = url_or_object.jsType(); - var fields = std.EnumSet(Fields).initEmpty(); - - const is_first_argument_a_url = - // fastest path: - url_or_object_type.isStringLike() or - // slower path: - url_or_object.as(JSC.DOMURL) != null; - - if (is_first_argument_a_url) { - const str = try bun.String.fromJS(arguments[0], globalThis); - req.url = str; - - if (!req.url.isEmpty()) - fields.insert(.url); - } else if (!url_or_object_type.isObject()) { - return globalThis.throw("Failed to construct 'Request': expected non-empty string or object", .{}); - } - - const values_to_try_ = [_]JSValue{ - if (arguments.len > 1 and arguments[1].isObject()) - arguments[1] - else if (is_first_argument_a_url) - JSValue.undefined - else - url_or_object, - if (is_first_argument_a_url) JSValue.undefined else url_or_object, - }; - const values_to_try = values_to_try_[0 .. 
@as(usize, @intFromBool(!is_first_argument_a_url)) + - @as(usize, @intFromBool(arguments.len > 1 and arguments[1].isObject()))]; - for (values_to_try) |value| { - const value_type = value.jsType(); - const explicit_check = values_to_try.len == 2 and value_type == .FinalObject and values_to_try[1].jsType() == .DOMWrapper; - if (value_type == .DOMWrapper) { - if (value.asDirect(Request)) |request| { - if (values_to_try.len == 1) { - request.cloneInto(&req, globalThis.allocator(), globalThis, fields.contains(.url)); - success = true; - return req; - } - - if (!fields.contains(.method)) { - req.method = request.method; - fields.insert(.method); - } - - if (!fields.contains(.headers)) { - if (request.cloneHeaders(globalThis)) |headers| { - req._headers = headers; - fields.insert(.headers); - } - - if (globalThis.hasException()) return error.JSError; - } - - if (!fields.contains(.body)) { - switch (request.body.value) { - .Null, .Empty, .Used => {}, - else => { - req.body.value = request.body.value.clone(globalThis); - if (globalThis.hasException()) return error.JSError; - fields.insert(.body); - }, - } - } - } - - if (value.asDirect(Response)) |response| { - if (!fields.contains(.method)) { - req.method = response.init.method; - fields.insert(.method); - } - - if (!fields.contains(.headers)) { - if (response.init.headers) |headers| { - req._headers = headers.cloneThis(globalThis); - fields.insert(.headers); - } - } - - if (!fields.contains(.url)) { - if (!response.url.isEmpty()) { - req.url = response.url.dupeRef(); - fields.insert(.url); - } - } - - if (!fields.contains(.body)) { - switch (response.body.value) { - .Null, .Empty, .Used => {}, - else => { - req.body.value = response.body.value.clone(globalThis); - fields.insert(.body); - }, - } - } - - if (globalThis.hasException()) return error.JSError; - } - } - - if (!fields.contains(.body)) { - if (value.fastGet(globalThis, .body)) |body_| { - fields.insert(.body); - req.body.value = try Body.Value.fromJS(globalThis, body_); - } - - if (globalThis.hasException()) return error.JSError; - } - - if (!fields.contains(.url)) { - if (value.fastGet(globalThis, .url)) |url| { - req.url = try bun.String.fromJS(url, globalThis); - if (!req.url.isEmpty()) - fields.insert(.url); - - // first value - } else if (@intFromEnum(value) == @intFromEnum(values_to_try[values_to_try.len - 1]) and !is_first_argument_a_url and - value.implementsToString(globalThis)) - { - const str = try bun.String.fromJS(value, globalThis); - req.url = str; - if (!req.url.isEmpty()) - fields.insert(.url); - } - - if (globalThis.hasException()) return error.JSError; - } - - if (!fields.contains(.signal)) { - if (try value.getTruthy(globalThis, "signal")) |signal_| { - fields.insert(.signal); - if (AbortSignal.fromJS(signal_)) |signal| { - //Keep it alive - signal_.ensureStillAlive(); - req.signal = signal.ref(); - } else { - if (!globalThis.hasException()) { - return globalThis.throw("Failed to construct 'Request': signal is not of type AbortSignal.", .{}); - } - return error.JSError; - } - } - - if (globalThis.hasException()) return error.JSError; - } - - if (!fields.contains(.method) or !fields.contains(.headers)) { - if (globalThis.hasException()) return error.JSError; - if (try Response.Init.init(globalThis, value)) |response_init| { - if (!explicit_check or (explicit_check and value.fastGet(globalThis, .headers) != null)) { - if (response_init.headers) |headers| { - if (!fields.contains(.headers)) { - req._headers = headers; - fields.insert(.headers); - } else { - 
headers.deref(); - } - } - } - - if (globalThis.hasException()) return error.JSError; - - if (!explicit_check or (explicit_check and value.fastGet(globalThis, .method) != null)) { - if (!fields.contains(.method)) { - req.method = response_init.method; - fields.insert(.method); - } - } - if (globalThis.hasException()) return error.JSError; - } - - if (globalThis.hasException()) return error.JSError; - } - } - - if (globalThis.hasException()) { - return error.JSError; - } - - if (req.url.isEmpty()) { - return globalThis.throw("Failed to construct 'Request': url is required.", .{}); - } - - const href = JSC.URL.hrefFromString(req.url); - if (href.isEmpty()) { - if (!globalThis.hasException()) { - // globalThis.throw can cause GC, which could cause the above string to be freed. - // so we must increment the reference count before calling it. - return globalThis.ERR(.INVALID_URL, "Failed to construct 'Request': Invalid URL \"{}\"", .{req.url}).throw(); - } - return error.JSError; - } - - // hrefFromString increments the reference count if they end up being - // the same - // - // we increment the reference count on usage above, so we must - // decrement it to be perfectly balanced. - req.url.deref(); - - req.url = href; - - if (req.body.value == .Blob and - req._headers != null and - req.body.value.Blob.content_type.len > 0 and - !req._headers.?.fastHas(.ContentType)) - { - req._headers.?.put(.ContentType, req.body.value.Blob.content_type, globalThis); - } - - req.calculateEstimatedByteSize(); - success = true; - - return req; - } - - pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!*Request { - const arguments_ = callframe.arguments_old(2); - const arguments = arguments_.ptr[0..arguments_.len]; - - const request = try constructInto(globalThis, arguments); - return Request.new(request); - } - - pub fn getBodyValue( - this: *Request, - ) *Body.Value { - return &this.body.value; - } - - pub fn doClone( - this: *Request, - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSC.JSValue { - const this_value = callframe.this(); - var cloned = this.clone(getAllocator(globalThis), globalThis); - - if (globalThis.hasException()) { - cloned.finalize(); - return .zero; - } - - const js_wrapper = cloned.toJS(globalThis); - if (js_wrapper != .zero) { - if (cloned.body.value == .Locked) { - if (cloned.body.value.Locked.readable.get(globalThis)) |readable| { - // If we are teed, then we need to update the cached .body - // value to point to the new readable stream - // We must do this on both the original and cloned request - // but especially the original request since it will have a stale .body value now. - js.bodySetCached(js_wrapper, globalThis, readable.value); - if (this.body.value.Locked.readable.get(globalThis)) |other_readable| { - js.bodySetCached(this_value, globalThis, other_readable.value); - } - } - } - } - - return js_wrapper; - } - - // Returns if the request has headers already cached/set. - pub fn hasFetchHeaders(this: *Request) bool { - return this._headers != null; - } - - /// Sets the headers of the request. This will take ownership of the headers. - /// it will deref the previous headers if they exist. - pub fn setFetchHeaders( - this: *Request, - headers: ?*FetchHeaders, - ) void { - if (this._headers) |old_headers| { - old_headers.deref(); - } - - this._headers = headers; - } - - /// Returns the headers of the request. If the headers are not already cached, it will create a new FetchHeaders object. 
- /// If the headers are empty, it will look at request_context to get the headers. - /// If the headers are empty and request_context is null, it will create an empty FetchHeaders object. - pub fn ensureFetchHeaders( - this: *Request, - globalThis: *JSC.JSGlobalObject, - ) *FetchHeaders { - if (this._headers) |headers| { - // headers is already set - return headers; - } - - if (this.request_context.getRequest()) |req| { - // we have a request context, so we can get the headers from it - this._headers = FetchHeaders.createFromUWS(req); - } else { - // we don't have a request context, so we need to create an empty headers object - this._headers = FetchHeaders.createEmpty(); - - if (this.body.value == .Blob) { - const content_type = this.body.value.Blob.content_type; - if (content_type.len > 0) { - this._headers.?.put(.ContentType, content_type, globalThis); - } - } - } - - return this._headers.?; - } - - pub fn getFetchHeadersUnlessEmpty( - this: *Request, - ) ?*FetchHeaders { - if (this._headers == null) { - if (this.request_context.getRequest()) |req| { - // we have a request context, so we can get the headers from it - this._headers = FetchHeaders.createFromUWS(req); - } - } - - const headers = this._headers orelse return null; - if (headers.isEmpty()) { - return null; - } - return headers; - } - - /// Returns the headers of the request. This will not look at the request contex to get the headers. - pub fn getFetchHeaders( - this: *Request, - ) ?*FetchHeaders { - return this._headers; - } - - /// This should only be called by the JS code. use getFetchHeaders to get the current headers or ensureFetchHeaders to get the headers and create them if they don't exist. - pub fn getHeaders( - this: *Request, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return this.ensureFetchHeaders(globalThis).toJS(globalThis); - } - - pub fn cloneHeaders(this: *Request, globalThis: *JSGlobalObject) ?*FetchHeaders { - if (this._headers == null) { - if (this.request_context.getRequest()) |uws_req| { - this._headers = FetchHeaders.createFromUWS(uws_req); - } - } - - if (this._headers) |head| { - if (head.isEmpty()) { - return null; - } - - return head.cloneThis(globalThis); - } - - return null; - } - - pub fn cloneInto( - this: *Request, - req: *Request, - allocator: std.mem.Allocator, - globalThis: *JSGlobalObject, - preserve_url: bool, - ) void { - _ = allocator; - this.ensureURL() catch {}; - const vm = globalThis.bunVM(); - const body = vm.initRequestBodyValue(this.body.value.clone(globalThis)) catch { - if (!globalThis.hasException()) { - globalThis.throw("Failed to clone request", .{}) catch {}; - } - return; - }; - const original_url = req.url; - - req.* = Request{ - .body = body, - .url = if (preserve_url) original_url else this.url.dupeRef(), - .method = this.method, - ._headers = this.cloneHeaders(globalThis), - }; - - if (this.signal) |signal| { - req.signal = signal.ref(); - } - } - - pub fn clone(this: *Request, allocator: std.mem.Allocator, globalThis: *JSGlobalObject) *Request { - const req = Request.new(undefined); - this.cloneInto(req, allocator, globalThis, false); - return req; - } - - pub fn setTimeout( - this: *Request, - seconds: c_uint, - ) void { - _ = this.request_context.setTimeout(seconds); - } -}; diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig deleted file mode 100644 index dca633dea8..0000000000 --- a/src/bun.js/webcore/response.zig +++ /dev/null @@ -1,944 +0,0 @@ -const std = @import("std"); -const Api = @import("../../api/schema.zig").Api; 
-const bun = @import("bun"); -const MimeType = bun.http.MimeType; -const ZigURL = @import("../../url.zig").URL; -const http = bun.http; -const FetchRedirect = http.FetchRedirect; -const JSC = bun.JSC; - -const Method = @import("../../http/method.zig").Method; -const FetchHeaders = JSC.FetchHeaders; -const ObjectPool = @import("../../pool.zig").ObjectPool; -const SystemError = JSC.SystemError; -const Output = bun.Output; -const MutableString = bun.MutableString; -const strings = bun.strings; -const string = bun.string; -const default_allocator = bun.default_allocator; -const FeatureFlags = bun.FeatureFlags; -const ArrayBuffer = @import("../base.zig").ArrayBuffer; -const Properties = @import("../base.zig").Properties; - -const castObj = @import("../base.zig").castObj; -const getAllocator = @import("../base.zig").getAllocator; - -const Environment = @import("../../env.zig"); -const ZigString = JSC.ZigString; -const IdentityContext = @import("../../identity_context.zig").IdentityContext; -const JSPromise = JSC.JSPromise; -const JSValue = JSC.JSValue; -const JSGlobalObject = JSC.JSGlobalObject; -const NullableAllocator = bun.NullableAllocator; -const DataURL = @import("../../resolver/data_url.zig").DataURL; - -const SSLConfig = @import("../api/server.zig").ServerConfig.SSLConfig; - -const VirtualMachine = JSC.VirtualMachine; -const Task = JSC.Task; -const JSPrinter = bun.js_printer; -const picohttp = bun.picohttp; -const StringJoiner = bun.StringJoiner; -const uws = bun.uws; -const Mutex = bun.Mutex; - -const InlineBlob = JSC.WebCore.InlineBlob; -const AnyBlob = JSC.WebCore.AnyBlob; -const InternalBlob = JSC.WebCore.InternalBlob; -const BodyMixin = JSC.WebCore.BodyMixin; -const Body = JSC.WebCore.Body; -const Request = JSC.WebCore.Request; -const Blob = JSC.WebCore.Blob; -const Async = bun.Async; - -const BoringSSL = bun.BoringSSL.c; -const X509 = @import("../api/bun/x509.zig"); -const PosixToWinNormalizer = bun.path.PosixToWinNormalizer; -const s3 = bun.S3; - -pub const Response = struct { - const ResponseMixin = BodyMixin(@This()); - pub const js = JSC.Codegen.JSResponse; - // NOTE: toJS is overridden - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - body: Body, - init: Init, - url: bun.String = bun.String.empty, - redirected: bool = false, - /// We increment this count in fetch so if JS Response is discarted we can resolve the Body - /// In the server we use a flag response_protected to protect/unprotect the response - ref_count: u32 = 1, - - // We must report a consistent value for this - reported_estimated_size: usize = 0, - - pub const getText = ResponseMixin.getText; - pub const getBody = ResponseMixin.getBody; - pub const getBytes = ResponseMixin.getBytes; - pub const getBodyUsed = ResponseMixin.getBodyUsed; - pub const getJSON = ResponseMixin.getJSON; - pub const getArrayBuffer = ResponseMixin.getArrayBuffer; - pub const getBlob = ResponseMixin.getBlob; - pub const getBlobWithoutCallFrame = ResponseMixin.getBlobWithoutCallFrame; - pub const getFormData = ResponseMixin.getFormData; - - pub fn getFormDataEncoding(this: *Response) ?*bun.FormData.AsyncFormData { - var content_type_slice: ZigString.Slice = this.getContentType() orelse return null; - defer content_type_slice.deinit(); - const encoding = bun.FormData.Encoding.get(content_type_slice.slice()) orelse return null; - return bun.FormData.AsyncFormData.init(bun.default_allocator, encoding) catch bun.outOfMemory(); - } - - pub fn estimatedSize(this: *Response) callconv(.C) usize { - return 
this.reported_estimated_size; - } - - pub fn calculateEstimatedByteSize(this: *Response) void { - this.reported_estimated_size = this.body.value.estimatedSize() + - this.url.byteSlice().len + - this.init.status_text.byteSlice().len + - @sizeOf(Response); - } - - pub fn toJS(this: *Response, globalObject: *JSGlobalObject) JSValue { - this.calculateEstimatedByteSize(); - return js.toJSUnchecked(globalObject, this); - } - - pub fn getBodyValue( - this: *Response, - ) *Body.Value { - return &this.body.value; - } - - pub export fn jsFunctionRequestOrResponseHasBodyValue(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { - _ = globalObject; // autofix - const arguments = callframe.arguments_old(1); - const this_value = arguments.ptr[0]; - if (this_value.isEmptyOrUndefinedOrNull()) { - return .false; - } - - if (this_value.as(Response)) |response| { - return JSC.JSValue.jsBoolean(!response.body.value.isDefinitelyEmpty()); - } else if (this_value.as(Request)) |request| { - return JSC.JSValue.jsBoolean(!request.body.value.isDefinitelyEmpty()); - } - - return .false; - } - - pub export fn jsFunctionGetCompleteRequestOrResponseBodyValueAsArrayBuffer(globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { - const arguments = callframe.arguments_old(1); - const this_value = arguments.ptr[0]; - if (this_value.isEmptyOrUndefinedOrNull()) { - return .undefined; - } - - const body: *Body.Value = brk: { - if (this_value.as(Response)) |response| { - break :brk &response.body.value; - } else if (this_value.as(Request)) |request| { - break :brk &request.body.value; - } - - return .undefined; - }; - - // Get the body if it's available synchronously. - switch (body.*) { - .Used, .Empty, .Null => return .undefined, - .Blob => |*blob| { - if (blob.isBunFile()) { - return .undefined; - } - defer body.* = .{ .Used = {} }; - return blob.toArrayBuffer(globalObject, .transfer) catch return .zero; - }, - .WTFStringImpl, .InternalBlob => { - var any_blob = body.useAsAnyBlob(); - return any_blob.toArrayBufferTransfer(globalObject) catch return .zero; - }, - .Error, .Locked => return .undefined, - } - } - - pub fn getFetchHeaders( - this: *Response, - ) ?*FetchHeaders { - return this.init.headers; - } - - pub inline fn statusCode(this: *const Response) u16 { - return this.init.status_code; - } - - pub fn redirectLocation(this: *const Response) ?[]const u8 { - return this.header(.Location); - } - - pub fn header(this: *const Response, name: JSC.FetchHeaders.HTTPHeaderName) ?[]const u8 { - return if ((this.init.headers orelse return null).fastGet(name)) |str| - str.slice() - else - null; - } - - pub const Props = struct {}; - - pub fn writeFormat(this: *Response, comptime Formatter: type, formatter: *Formatter, writer: anytype, comptime enable_ansi_colors: bool) !void { - const Writer = @TypeOf(writer); - try writer.print("Response ({}) {{\n", .{bun.fmt.size(this.body.len(), .{})}); - - { - formatter.indent += 1; - defer formatter.indent -|= 1; - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("ok: ", enable_ansi_colors)); - try formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.isOK()), .BooleanObject, enable_ansi_colors); - formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); - try writer.writeAll("\n"); - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("url: \"", enable_ansi_colors)); - try 
writer.print(comptime Output.prettyFmt("{}", enable_ansi_colors), .{this.url}); - try writer.writeAll("\""); - formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); - try writer.writeAll("\n"); - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("status: ", enable_ansi_colors)); - try formatter.printAs(.Double, Writer, writer, JSC.JSValue.jsNumber(this.init.status_code), .NumberObject, enable_ansi_colors); - formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); - try writer.writeAll("\n"); - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("statusText: ", enable_ansi_colors)); - try writer.print(comptime Output.prettyFmt("\"{}\"", enable_ansi_colors), .{this.init.status_text}); - formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); - try writer.writeAll("\n"); - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("headers: ", enable_ansi_colors)); - try formatter.printAs(.Private, Writer, writer, this.getHeaders(formatter.globalThis), .DOMWrapper, enable_ansi_colors); - formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); - try writer.writeAll("\n"); - - try formatter.writeIndent(Writer, writer); - try writer.writeAll(comptime Output.prettyFmt("redirected: ", enable_ansi_colors)); - try formatter.printAs(.Boolean, Writer, writer, JSC.JSValue.jsBoolean(this.redirected), .BooleanObject, enable_ansi_colors); - formatter.printComma(Writer, writer, enable_ansi_colors) catch bun.outOfMemory(); - try writer.writeAll("\n"); - - formatter.resetLine(); - try this.body.writeFormat(Formatter, formatter, writer, enable_ansi_colors); - } - try writer.writeAll("\n"); - try formatter.writeIndent(Writer, writer); - try writer.writeAll("}"); - formatter.resetLine(); - } - - pub fn isOK(this: *const Response) bool { - return this.init.status_code >= 200 and this.init.status_code <= 299; - } - - pub fn getURL( - this: *Response, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - // https://developer.mozilla.org/en-US/docs/Web/API/Response/url - return this.url.toJS(globalThis); - } - - pub fn getResponseType( - this: *Response, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - if (this.init.status_code < 200) { - return bun.String.static("error").toJS(globalThis); - } - - return bun.String.static("default").toJS(globalThis); - } - - pub fn getStatusText( - this: *Response, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - // https://developer.mozilla.org/en-US/docs/Web/API/Response/statusText - return this.init.status_text.toJS(globalThis); - } - - pub fn getRedirected( - this: *Response, - _: *JSC.JSGlobalObject, - ) JSC.JSValue { - // https://developer.mozilla.org/en-US/docs/Web/API/Response/redirected - return JSValue.jsBoolean(this.redirected); - } - - pub fn getOK( - this: *Response, - _: *JSC.JSGlobalObject, - ) JSC.JSValue { - // https://developer.mozilla.org/en-US/docs/Web/API/Response/ok - return JSValue.jsBoolean(this.isOK()); - } - - fn getOrCreateHeaders(this: *Response, globalThis: *JSC.JSGlobalObject) *FetchHeaders { - if (this.init.headers == null) { - this.init.headers = FetchHeaders.createEmpty(); - - if (this.body.value == .Blob) { - const content_type = this.body.value.Blob.content_type; - if (content_type.len > 0) { - this.init.headers.?.put(.ContentType, content_type, globalThis); - } - } - } - - return this.init.headers.?; - } - - pub 
fn getHeaders( - this: *Response, - globalThis: *JSC.JSGlobalObject, - ) JSC.JSValue { - return this.getOrCreateHeaders(globalThis).toJS(globalThis); - } - - pub fn doClone( - this: *Response, - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSValue { - const this_value = callframe.this(); - const cloned = this.clone(globalThis); - if (globalThis.hasException()) { - cloned.finalize(); - return .zero; - } - - const js_wrapper = Response.makeMaybePooled(globalThis, cloned); - - if (js_wrapper != .zero) { - if (cloned.body.value == .Locked) { - if (cloned.body.value.Locked.readable.get(globalThis)) |readable| { - // If we are teed, then we need to update the cached .body - // value to point to the new readable stream - // We must do this on both the original and cloned response - // but especially the original response since it will have a stale .body value now. - js.bodySetCached(js_wrapper, globalThis, readable.value); - if (this.body.value.Locked.readable.get(globalThis)) |other_readable| { - js.bodySetCached(this_value, globalThis, other_readable.value); - } - } - } - } - - return js_wrapper; - } - - pub fn makeMaybePooled(globalObject: *JSC.JSGlobalObject, ptr: *Response) JSValue { - return ptr.toJS(globalObject); - } - - pub fn cloneValue( - this: *Response, - globalThis: *JSGlobalObject, - ) Response { - return Response{ - .body = this.body.clone(globalThis), - .init = this.init.clone(globalThis), - .url = this.url.clone(), - .redirected = this.redirected, - }; - } - - pub fn clone(this: *Response, globalThis: *JSGlobalObject) *Response { - return bun.new(Response, this.cloneValue(globalThis)); - } - - pub fn getStatus( - this: *Response, - _: *JSC.JSGlobalObject, - ) JSC.JSValue { - // https://developer.mozilla.org/en-US/docs/Web/API/Response/status - return JSValue.jsNumber(this.init.status_code); - } - - fn destroy(this: *Response) void { - this.init.deinit(bun.default_allocator); - this.body.deinit(bun.default_allocator); - this.url.deref(); - - bun.destroy(this); - } - - pub fn ref(this: *Response) *Response { - this.ref_count += 1; - return this; - } - - pub fn unref(this: *Response) void { - bun.assert(this.ref_count > 0); - this.ref_count -= 1; - if (this.ref_count == 0) { - this.destroy(); - } - } - - pub fn finalize( - this: *Response, - ) callconv(.C) void { - this.unref(); - } - - pub fn getContentType( - this: *Response, - ) ?ZigString.Slice { - if (this.init.headers) |headers| { - if (headers.fastGet(.ContentType)) |value| { - return value.toSlice(bun.default_allocator); - } - } - - if (this.body.value == .Blob) { - if (this.body.value.Blob.content_type.len > 0) - return ZigString.Slice.fromUTF8NeverFree(this.body.value.Blob.content_type); - } - - return null; - } - - pub fn constructJSON( - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSValue { - const args_list = callframe.arguments_old(2); - // https://github.com/remix-run/remix/blob/db2c31f64affb2095e4286b91306b96435967969/packages/remix-server-runtime/responses.ts#L4 - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), args_list.ptr[0..args_list.len]); - - var response = Response{ - .body = Body{ - .value = .{ .Empty = {} }, - }, - .init = Response.Init{ - .status_code = 200, - }, - .url = bun.String.empty, - }; - var did_succeed = false; - defer { - if (!did_succeed) { - response.body.deinit(bun.default_allocator); - response.init.deinit(bun.default_allocator); - } - } - const json_value = args.nextEat() orelse JSC.JSValue.zero; - - if 
(@intFromEnum(json_value) != 0) { - var str = bun.String.empty; - // calling JSON.stringify on an empty string adds extra quotes - // so this is correct - json_value.jsonStringify(globalThis, 0, &str); - - if (globalThis.hasException()) { - return .zero; - } - - if (!str.isEmpty()) { - if (str.value.WTFStringImpl.toUTF8IfNeeded(bun.default_allocator)) |bytes| { - defer str.deref(); - response.body.value = .{ - .InternalBlob = InternalBlob{ - .bytes = std.ArrayList(u8).fromOwnedSlice(bun.default_allocator, @constCast(bytes.slice())), - .was_string = true, - }, - }; - } else { - response.body.value = Body.Value{ - .WTFStringImpl = str.value.WTFStringImpl, - }; - } - } - } - - if (args.nextEat()) |init| { - if (init.isUndefinedOrNull()) {} else if (init.isNumber()) { - response.init.status_code = @as(u16, @intCast(@min(@max(0, init.toInt32()), std.math.maxInt(u16)))); - } else { - if (Response.Init.init(globalThis, init) catch |err| if (err == error.JSError) return .zero else null) |_init| { - response.init = _init; - } - } - } - - var headers_ref = response.getOrCreateHeaders(globalThis); - headers_ref.putDefault(.ContentType, MimeType.json.value, globalThis); - did_succeed = true; - return bun.new(Response, response).toJS(globalThis); - } - pub fn constructRedirect( - globalThis: *JSC.JSGlobalObject, - callframe: *JSC.CallFrame, - ) bun.JSError!JSValue { - var args_list = callframe.arguments_old(4); - // https://github.com/remix-run/remix/blob/db2c31f64affb2095e4286b91306b96435967969/packages/remix-server-runtime/responses.ts#L4 - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), args_list.ptr[0..args_list.len]); - - var url_string_slice = ZigString.Slice.empty; - defer url_string_slice.deinit(); - var response: Response = brk: { - var response = Response{ - .init = Response.Init{ - .status_code = 302, - }, - .body = Body{ - .value = .{ .Empty = {} }, - }, - .url = bun.String.empty, - }; - - const url_string_value = args.nextEat() orelse JSC.JSValue.zero; - var url_string = ZigString.init(""); - - if (@intFromEnum(url_string_value) != 0) { - url_string = try url_string_value.getZigString(globalThis); - } - url_string_slice = url_string.toSlice(getAllocator(globalThis)); - var did_succeed = false; - defer { - if (!did_succeed) { - response.body.deinit(bun.default_allocator); - response.init.deinit(bun.default_allocator); - } - } - - if (args.nextEat()) |init| { - if (init.isUndefinedOrNull()) {} else if (init.isNumber()) { - response.init.status_code = @as(u16, @intCast(@min(@max(0, init.toInt32()), std.math.maxInt(u16)))); - } else { - if (Response.Init.init(globalThis, init) catch |err| - if (err == error.JSError) return .zero else null) |_init| - { - response.init = _init; - response.init.status_code = 302; - } - } - } - if (globalThis.hasException()) { - return .zero; - } - did_succeed = true; - break :brk response; - }; - - response.init.headers = response.getOrCreateHeaders(globalThis); - var headers_ref = response.init.headers.?; - headers_ref.put(.Location, url_string_slice.slice(), globalThis); - const ptr = bun.new(Response, response); - - return ptr.toJS(globalThis); - } - pub fn constructError( - globalThis: *JSC.JSGlobalObject, - _: *JSC.CallFrame, - ) bun.JSError!JSValue { - const response = bun.new( - Response, - Response{ - .init = Response.Init{ - .status_code = 0, - }, - .body = Body{ - .value = .{ .Empty = {} }, - }, - }, - ); - - return response.toJS(globalThis); - } - - pub fn constructor(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) 
bun.JSError!*Response { - const arguments = callframe.argumentsAsArray(2); - - if (!arguments[0].isUndefinedOrNull() and arguments[0].isObject()) { - if (arguments[0].as(Blob)) |blob| { - if (blob.isS3()) { - if (!arguments[1].isEmptyOrUndefinedOrNull()) { - return globalThis.throwInvalidArguments("new Response(s3File) do not support ResponseInit options", .{}); - } - var response: Response = .{ - .init = Response.Init{ - .status_code = 302, - }, - .body = Body{ - .value = .{ .Empty = {} }, - }, - .url = bun.String.empty, - }; - - const credentials = blob.store.?.data.s3.getCredentials(); - - const result = credentials.signRequest(.{ - .path = blob.store.?.data.s3.path(), - .method = .GET, - }, false, .{ .expires = 15 * 60 }) catch |sign_err| { - return s3.throwSignError(sign_err, globalThis); - }; - defer result.deinit(); - response.init.headers = response.getOrCreateHeaders(globalThis); - response.redirected = true; - var headers_ref = response.init.headers.?; - headers_ref.put(.Location, result.url, globalThis); - return bun.new(Response, response); - } - } - } - var init: Init = (brk: { - if (arguments[1].isUndefinedOrNull()) { - break :brk Init{ - .status_code = 200, - .headers = null, - }; - } - if (arguments[1].isObject()) { - break :brk try Init.init(globalThis, arguments[1]) orelse unreachable; - } - if (!globalThis.hasException()) { - return globalThis.throwInvalidArguments("Failed to construct 'Response': The provided body value is not of type 'ResponseInit'", .{}); - } - return error.JSError; - }); - errdefer init.deinit(bun.default_allocator); - - if (globalThis.hasException()) { - return error.JSError; - } - - var body: Body = brk: { - if (arguments[0].isUndefinedOrNull()) { - break :brk Body{ - .value = Body.Value{ .Null = {} }, - }; - } - break :brk try Body.extract(globalThis, arguments[0]); - }; - errdefer body.deinit(bun.default_allocator); - - if (globalThis.hasException()) { - return error.JSError; - } - - var response = bun.new(Response, Response{ - .body = body, - .init = init, - }); - - if (response.body.value == .Blob and - response.init.headers != null and - response.body.value.Blob.content_type.len > 0 and - !response.init.headers.?.fastHas(.ContentType)) - { - response.init.headers.?.put(.ContentType, response.body.value.Blob.content_type, globalThis); - } - - response.calculateEstimatedByteSize(); - - return response; - } - - pub const Init = struct { - headers: ?*FetchHeaders = null, - status_code: u16, - status_text: bun.String = bun.String.empty, - method: Method = Method.GET, - - pub fn clone(this: Init, ctx: *JSGlobalObject) Init { - var that = this; - const headers = this.headers; - if (headers) |head| { - that.headers = head.cloneThis(ctx); - } - that.status_text = this.status_text.clone(); - - return that; - } - - pub fn init(globalThis: *JSGlobalObject, response_init: JSC.JSValue) bun.JSError!?Init { - var result = Init{ .status_code = 200 }; - errdefer { - result.deinit(bun.default_allocator); - } - - if (!response_init.isCell()) - return null; - - if (response_init.jsType() == .DOMWrapper) { - // fast path: it's a Request object or a Response object - // we can skip calling JS getters - if (response_init.asDirect(Request)) |req| { - if (req.getFetchHeadersUnlessEmpty()) |headers| { - result.headers = headers.cloneThis(globalThis); - } - - result.method = req.method; - return result; - } - - if (response_init.asDirect(Response)) |resp| { - return resp.init.clone(globalThis); - } - } - - if (globalThis.hasException()) { - return error.JSError; - } - - 
if (response_init.fastGet(globalThis, .headers)) |headers| { - if (headers.as(FetchHeaders)) |orig| { - if (!orig.isEmpty()) { - result.headers = orig.cloneThis(globalThis); - } - } else { - result.headers = FetchHeaders.createFromJS(globalThis, headers); - } - } - - if (globalThis.hasException()) { - return error.JSError; - } - - if (response_init.fastGet(globalThis, .status)) |status_value| { - const number = status_value.coerceToInt64(globalThis); - if ((200 <= number and number < 600) or number == 101) { - result.status_code = @as(u16, @truncate(@as(u32, @intCast(number)))); - } else { - if (!globalThis.hasException()) { - const err = globalThis.createRangeErrorInstance("The status provided ({d}) must be 101 or in the range of [200, 599]", .{number}); - return globalThis.throwValue(err); - } - return error.JSError; - } - } - - if (globalThis.hasException()) { - return error.JSError; - } - - if (response_init.fastGet(globalThis, .statusText)) |status_text| { - result.status_text = try bun.String.fromJS(status_text, globalThis); - } - - if (response_init.fastGet(globalThis, .method)) |method_value| { - if (try Method.fromJS(globalThis, method_value)) |method| { - result.method = method; - } - } - - return result; - } - - pub fn deinit(this: *Init, _: std.mem.Allocator) void { - if (this.headers) |headers| { - this.headers = null; - - headers.deref(); - } - - this.status_text.deref(); - this.status_text = bun.String.empty; - } - }; - - pub fn @"404"(globalThis: *JSC.JSGlobalObject) Response { - return emptyWithStatus(globalThis, 404); - } - - pub fn @"200"(globalThis: *JSC.JSGlobalObject) Response { - return emptyWithStatus(globalThis, 200); - } - - inline fn emptyWithStatus(_: *JSC.JSGlobalObject, status: u16) Response { - return bun.new(Response, .{ - .body = Body{ - .value = Body.Value{ .Null = {} }, - }, - .init = Init{ - .status_code = status, - }, - }); - } -}; - -/// https://developer.mozilla.org/en-US/docs/Web/API/Headers -// TODO: move to http.zig. 
this has nothing to do with JSC or WebCore -pub const Headers = struct { - pub const Entry = struct { - name: Api.StringPointer, - value: Api.StringPointer, - - pub const List = bun.MultiArrayList(Entry); - }; - - entries: Entry.List = .{}, - buf: std.ArrayListUnmanaged(u8) = .{}, - allocator: std.mem.Allocator, - - pub fn memoryCost(this: *const Headers) usize { - return this.buf.items.len + this.entries.memoryCost(); - } - - pub fn clone(this: *Headers) !Headers { - return Headers{ - .entries = try this.entries.clone(this.allocator), - .buf = try this.buf.clone(this.allocator), - .allocator = this.allocator, - }; - } - - pub fn append(this: *Headers, name: []const u8, value: []const u8) !void { - var offset: u32 = @truncate(this.buf.items.len); - try this.buf.ensureUnusedCapacity(this.allocator, name.len + value.len); - const name_ptr = Api.StringPointer{ - .offset = offset, - .length = @truncate(name.len), - }; - this.buf.appendSliceAssumeCapacity(name); - offset = @truncate(this.buf.items.len); - this.buf.appendSliceAssumeCapacity(value); - - const value_ptr = Api.StringPointer{ - .offset = offset, - .length = @truncate(value.len), - }; - try this.entries.append(this.allocator, .{ - .name = name_ptr, - .value = value_ptr, - }); - } - - pub fn deinit(this: *Headers) void { - this.entries.deinit(this.allocator); - this.buf.clearAndFree(this.allocator); - } - pub fn getContentType(this: *const Headers) ?[]const u8 { - if (this.entries.len == 0 or this.buf.items.len == 0) { - return null; - } - const header_entries = this.entries.slice(); - const header_names = header_entries.items(.name); - const header_values = header_entries.items(.value); - - for (header_names, 0..header_names.len) |name, i| { - if (bun.strings.eqlCaseInsensitiveASCII(this.asStr(name), "content-type", true)) { - return this.asStr(header_values[i]); - } - } - return null; - } - pub fn asStr(this: *const Headers, ptr: Api.StringPointer) []const u8 { - return if (ptr.offset + ptr.length <= this.buf.items.len) - this.buf.items[ptr.offset..][0..ptr.length] - else - ""; - } - - pub const Options = struct { - body: ?*const AnyBlob = null, - }; - - pub fn fromPicoHttpHeaders(headers: []const picohttp.Header, allocator: std.mem.Allocator) !Headers { - const header_count = headers.len; - var result = Headers{ - .entries = .{}, - .buf = .{}, - .allocator = allocator, - }; - - var buf_len: usize = 0; - for (headers) |header| { - buf_len += header.name.len + header.value.len; - } - result.entries.ensureTotalCapacity(allocator, header_count) catch bun.outOfMemory(); - result.entries.len = headers.len; - result.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch bun.outOfMemory(); - result.buf.items.len = buf_len; - var offset: u32 = 0; - for (headers, 0..headers.len) |header, i| { - const name_offset = offset; - bun.copy(u8, result.buf.items[offset..][0..header.name.len], header.name); - offset += @truncate(header.name.len); - const value_offset = offset; - bun.copy(u8, result.buf.items[offset..][0..header.value.len], header.value); - offset += @truncate(header.value.len); - - result.entries.set(i, .{ - .name = .{ - .offset = name_offset, - .length = @truncate(header.name.len), - }, - .value = .{ - .offset = value_offset, - .length = @truncate(header.value.len), - }, - }); - } - return result; - } - - pub fn from(fetch_headers_ref: ?*FetchHeaders, allocator: std.mem.Allocator, options: Options) !Headers { - var header_count: u32 = 0; - var buf_len: u32 = 0; - if (fetch_headers_ref) |headers_ref| - 
headers_ref.count(&header_count, &buf_len); - var headers = Headers{ - .entries = .{}, - .buf = .{}, - .allocator = allocator, - }; - const buf_len_before_content_type = buf_len; - const needs_content_type = brk: { - if (options.body) |body| { - if (body.hasContentTypeFromUser() and (fetch_headers_ref == null or !fetch_headers_ref.?.fastHas(.ContentType))) { - header_count += 1; - buf_len += @as(u32, @truncate(body.contentType().len + "Content-Type".len)); - break :brk true; - } - } - break :brk false; - }; - headers.entries.ensureTotalCapacity(allocator, header_count) catch bun.outOfMemory(); - headers.entries.len = header_count; - headers.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch bun.outOfMemory(); - headers.buf.items.len = buf_len; - var sliced = headers.entries.slice(); - var names = sliced.items(.name); - var values = sliced.items(.value); - if (fetch_headers_ref) |headers_ref| - headers_ref.copyTo(names.ptr, values.ptr, headers.buf.items.ptr); - - // TODO: maybe we should send Content-Type header first instead of last? - if (needs_content_type) { - bun.copy(u8, headers.buf.items[buf_len_before_content_type..], "Content-Type"); - names[header_count - 1] = .{ - .offset = buf_len_before_content_type, - .length = "Content-Type".len, - }; - - bun.copy(u8, headers.buf.items[buf_len_before_content_type + "Content-Type".len ..], options.body.?.contentType()); - values[header_count - 1] = .{ - .offset = buf_len_before_content_type + @as(u32, "Content-Type".len), - .length = @as(u32, @truncate(options.body.?.contentType().len)), - }; - } - - return headers; - } -}; -pub const Fetch = @import("fetch.zig"); diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index c762ec0378..9235250072 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -1,470 +1,4 @@ -const std = @import("std"); -const Api = @import("../../api/schema.zig").Api; -const bun = @import("bun"); -const MimeType = HTTPClient.MimeType; -const ZigURL = @import("../../url.zig").URL; -const HTTPClient = bun.http; -const JSC = bun.JSC; - -const Method = @import("../../http/method.zig").Method; -const FetchHeaders = JSC.FetchHeaders; -const ObjectPool = @import("../../pool.zig").ObjectPool; -const SystemError = JSC.SystemError; -const Output = bun.Output; -const MutableString = bun.MutableString; -const strings = bun.strings; -const string = bun.string; -const default_allocator = bun.default_allocator; -const FeatureFlags = bun.FeatureFlags; -const ArrayBuffer = @import("../base.zig").ArrayBuffer; -const Properties = @import("../base.zig").Properties; -const Async = bun.Async; -const castObj = @import("../base.zig").castObj; -const getAllocator = @import("../base.zig").getAllocator; - -const Environment = @import("../../env.zig"); -const ZigString = JSC.ZigString; -const IdentityContext = @import("../../identity_context.zig").IdentityContext; -const JSInternalPromise = JSC.JSInternalPromise; -const JSPromise = JSC.JSPromise; -const JSValue = JSC.JSValue; -const JSGlobalObject = JSC.JSGlobalObject; -const E = bun.C.E; -const VirtualMachine = JSC.VirtualMachine; -const Task = JSC.Task; -const JSPrinter = bun.js_printer; -const picohttp = bun.picohttp; -const StringJoiner = bun.StringJoiner; -const uws = bun.uws; -const Blob = bun.JSC.WebCore.Blob; -const Response = JSC.WebCore.Response; -const Request = JSC.WebCore.Request; -const assert = bun.assert; -const Syscall = bun.sys; -const uv = bun.windows.libuv; - -const AnyBlob = bun.JSC.WebCore.AnyBlob; -pub const 
ReadableStream = struct { - value: JSValue, - ptr: Source, - pub const Strong = struct { - held: JSC.Strong = .empty, - - pub fn has(this: *Strong) bool { - return this.held.has(); - } - - pub fn isDisturbed(this: *const Strong, global: *JSC.JSGlobalObject) bool { - if (this.get(global)) |stream| { - return stream.isDisturbed(global); - } - - return false; - } - - pub fn init(this: ReadableStream, global: *JSGlobalObject) Strong { - return .{ - .held = JSC.Strong.create(this.value, global), - }; - } - - pub fn get(this: *const Strong, global: *JSC.JSGlobalObject) ?ReadableStream { - if (this.held.get()) |value| { - return ReadableStream.fromJS(value, global); - } - return null; - } - - pub fn deinit(this: *Strong) void { - // if (this.held.get()) |val| { - // ReadableStream__detach(val, this.held.globalThis.?); - // } - this.held.deinit(); - } - - pub fn tee(this: *Strong, global: *JSGlobalObject) ?ReadableStream { - if (this.get(global)) |stream| { - const first, const second = stream.tee(global) orelse return null; - this.held.set(global, first.value); - return second; - } - return null; - } - }; - - extern fn ReadableStream__tee(stream: JSValue, globalThis: *JSGlobalObject, out1: *JSC.JSValue, out2: *JSC.JSValue) bool; - pub fn tee(this: *const ReadableStream, globalThis: *JSGlobalObject) ?struct { ReadableStream, ReadableStream } { - var out1: JSC.JSValue = .zero; - var out2: JSC.JSValue = .zero; - if (!ReadableStream__tee(this.value, globalThis, &out1, &out2)) { - return null; - } - const out_stream2 = ReadableStream.fromJS(out2, globalThis) orelse return null; - const out_stream1 = ReadableStream.fromJS(out1, globalThis) orelse return null; - return .{ out_stream1, out_stream2 }; - } - - pub fn toJS(this: *const ReadableStream) JSValue { - return this.value; - } - - pub fn reloadTag(this: *ReadableStream, globalThis: *JSC.JSGlobalObject) void { - if (ReadableStream.fromJS(this.value, globalThis)) |stream| { - this.* = stream; - } else { - this.* = .{ .ptr = .{ .Invalid = {} }, .value = .zero }; - } - } - - pub fn toAnyBlob( - stream: *ReadableStream, - globalThis: *JSC.JSGlobalObject, - ) ?AnyBlob { - if (stream.isDisturbed(globalThis)) { - return null; - } - - stream.reloadTag(globalThis); - - switch (stream.ptr) { - .Blob => |blobby| { - if (blobby.toAnyBlob(globalThis)) |blob| { - stream.done(globalThis); - return blob; - } - }, - .File => |blobby| { - if (blobby.lazy == .blob) { - var blob = Blob.initWithStore(blobby.lazy.blob, globalThis); - blob.store.?.ref(); - // it should be lazy, file shouldn't have opened yet. 
- bun.assert(!blobby.started); - stream.done(globalThis); - return AnyBlob{ .Blob = blob }; - } - }, - .Bytes => |bytes| { - - // If we've received the complete body by the time this function is called - // we can avoid streaming it and convert it to a Blob - if (bytes.toAnyBlob()) |blob| { - stream.done(globalThis); - return blob; - } - - return null; - }, - else => {}, - } - - return null; - } - - pub fn done(this: *const ReadableStream, globalThis: *JSGlobalObject) void { - JSC.markBinding(@src()); - // done is called when we are done consuming the stream - // cancel actually mark the stream source as done - // this will resolve any pending promises to done: true - switch (this.ptr) { - .Blob => |source| { - source.parent().cancel(); - }, - .File => |source| { - source.parent().cancel(); - }, - .Bytes => |source| { - source.parent().cancel(); - }, - else => {}, - } - this.detachIfPossible(globalThis); - } - - pub fn cancel(this: *const ReadableStream, globalThis: *JSGlobalObject) void { - JSC.markBinding(@src()); - // cancel the stream - ReadableStream__cancel(this.value, globalThis); - // mark the stream source as done - this.done(globalThis); - } - - pub fn abort(this: *const ReadableStream, globalThis: *JSGlobalObject) void { - JSC.markBinding(@src()); - // for now we are just calling cancel should be fine - this.cancel(globalThis); - } - - pub fn forceDetach(this: *const ReadableStream, globalObject: *JSGlobalObject) void { - ReadableStream__detach(this.value, globalObject); - } - - /// Decrement Source ref count and detach the underlying stream if ref count is zero - /// be careful, this can invalidate the stream do not call this multiple times - /// this is meant to be called only once when we are done consuming the stream or from the ReadableStream.Strong.deinit - pub fn detachIfPossible(_: *const ReadableStream, _: *JSGlobalObject) void { - JSC.markBinding(@src()); - } - - pub const Tag = enum(i32) { - Invalid = -1, - - /// ReadableStreamDefaultController or ReadableByteStreamController - JavaScript = 0, - - /// ReadableByteStreamController - /// but with a BlobLoader - /// we can skip the BlobLoader and just use the underlying Blob - Blob = 1, - - /// ReadableByteStreamController - /// but with a FileLoader - /// we can skip the FileLoader and just use the underlying File - File = 2, - - /// This is a direct readable stream - /// That means we can turn it into whatever we want - Direct = 3, - - Bytes = 4, - }; - pub const Source = union(Tag) { - Invalid: void, - /// ReadableStreamDefaultController or ReadableByteStreamController - JavaScript: void, - /// ReadableByteStreamController - /// but with a BlobLoader - /// we can skip the BlobLoader and just use the underlying Blob - Blob: *ByteBlobLoader, - - /// ReadableByteStreamController - /// but with a FileLoader - /// we can skip the FileLoader and just use the underlying File - File: *FileReader, - - /// This is a direct readable stream - /// That means we can turn it into whatever we want - Direct: void, - - Bytes: *ByteStream, - }; - - extern fn ReadableStreamTag__tagged(globalObject: *JSGlobalObject, possibleReadableStream: *JSValue, ptr: *?*anyopaque) Tag; - extern fn ReadableStream__isDisturbed(possibleReadableStream: JSValue, globalObject: *JSGlobalObject) bool; - extern fn ReadableStream__isLocked(possibleReadableStream: JSValue, globalObject: *JSGlobalObject) bool; - extern fn ReadableStream__empty(*JSGlobalObject) JSC.JSValue; - extern fn ReadableStream__used(*JSGlobalObject) JSC.JSValue; - extern fn 
ReadableStream__cancel(stream: JSValue, *JSGlobalObject) void; - extern fn ReadableStream__abort(stream: JSValue, *JSGlobalObject) void; - extern fn ReadableStream__detach(stream: JSValue, *JSGlobalObject) void; - extern fn ReadableStream__fromBlob( - *JSGlobalObject, - store: *anyopaque, - offset: usize, - length: usize, - ) JSC.JSValue; - - pub fn isDisturbed(this: *const ReadableStream, globalObject: *JSGlobalObject) bool { - JSC.markBinding(@src()); - return isDisturbedValue(this.value, globalObject); - } - - pub fn isDisturbedValue(value: JSC.JSValue, globalObject: *JSGlobalObject) bool { - JSC.markBinding(@src()); - return ReadableStream__isDisturbed(value, globalObject); - } - - pub fn isLocked(this: *const ReadableStream, globalObject: *JSGlobalObject) bool { - JSC.markBinding(@src()); - return ReadableStream__isLocked(this.value, globalObject); - } - - pub fn fromJS(value: JSValue, globalThis: *JSGlobalObject) ?ReadableStream { - JSC.markBinding(@src()); - value.ensureStillAlive(); - var out = value; - - var ptr: ?*anyopaque = null; - return switch (ReadableStreamTag__tagged(globalThis, &out, &ptr)) { - .JavaScript => ReadableStream{ - .value = out, - .ptr = .{ - .JavaScript = {}, - }, - }, - .Blob => ReadableStream{ - .value = out, - .ptr = .{ - .Blob = @ptrCast(@alignCast(ptr.?)), - }, - }, - .File => ReadableStream{ - .value = out, - .ptr = .{ - .File = @ptrCast(@alignCast(ptr.?)), - }, - }, - - .Bytes => ReadableStream{ - .value = out, - .ptr = .{ - .Bytes = @ptrCast(@alignCast(ptr.?)), - }, - }, - - // .HTTPRequest => ReadableStream{ - // .value = out, - // .ptr = .{ - // .HTTPRequest = ptr.asPtr(HTTPRequest), - // }, - // }, - // .HTTPSRequest => ReadableStream{ - // .value = out, - // .ptr = .{ - // .HTTPSRequest = ptr.asPtr(HTTPSRequest), - // }, - // }, - else => null, - }; - } - - extern fn ZigGlobalObject__createNativeReadableStream(*JSGlobalObject, nativePtr: JSValue) JSValue; - - pub fn fromNative(globalThis: *JSGlobalObject, native: JSC.JSValue) JSC.JSValue { - JSC.markBinding(@src()); - return ZigGlobalObject__createNativeReadableStream(globalThis, native); - } - - pub fn fromBlob(globalThis: *JSGlobalObject, blob: *const Blob, recommended_chunk_size: Blob.SizeType) JSC.JSValue { - JSC.markBinding(@src()); - var store = blob.store orelse { - return ReadableStream.empty(globalThis); - }; - switch (store.data) { - .bytes => { - var reader = ByteBlobLoader.Source.new( - .{ - .globalThis = globalThis, - .context = undefined, - }, - ); - reader.context.setup(blob, recommended_chunk_size); - return reader.toReadableStream(globalThis); - }, - .file => { - var reader = FileReader.Source.new(.{ - .globalThis = globalThis, - .context = .{ - .event_loop = JSC.EventLoopHandle.init(globalThis.bunVM().eventLoop()), - .start_offset = blob.offset, - .max_size = if (blob.size != Blob.max_size) blob.size else null, - - .lazy = .{ - .blob = store, - }, - }, - }); - store.ref(); - - return reader.toReadableStream(globalThis); - }, - .s3 => |*s3| { - const credentials = s3.getCredentials(); - const path = s3.path(); - const proxy = globalThis.bunVM().transpiler.env.getHttpProxy(true, null); - const proxy_url = if (proxy) |p| p.href else null; - - return bun.S3.readableStream(credentials, path, blob.offset, if (blob.size != Blob.max_size) blob.size else null, proxy_url, globalThis); - }, - } - } - - pub fn fromFileBlobWithOffset( - globalThis: *JSGlobalObject, - blob: *const Blob, - offset: usize, - ) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - var store = blob.store orelse { 
- return ReadableStream.empty(globalThis); - }; - switch (store.data) { - .file => { - var reader = FileReader.Source.new(.{ - .globalThis = globalThis, - .context = .{ - .event_loop = JSC.EventLoopHandle.init(globalThis.bunVM().eventLoop()), - .start_offset = offset, - .lazy = .{ - .blob = store, - }, - }, - }); - store.ref(); - - return reader.toReadableStream(globalThis); - }, - else => { - return globalThis.throw("Expected FileBlob", .{}); - }, - } - } - - pub fn fromPipe( - globalThis: *JSGlobalObject, - parent: anytype, - buffered_reader: anytype, - ) JSC.JSValue { - _ = parent; // autofix - JSC.markBinding(@src()); - var source = FileReader.Source.new(.{ - .globalThis = globalThis, - .context = .{ - .event_loop = JSC.EventLoopHandle.init(globalThis.bunVM().eventLoop()), - }, - }); - source.context.reader.from(buffered_reader, &source.context); - - return source.toReadableStream(globalThis); - } - - pub fn empty(globalThis: *JSGlobalObject) JSC.JSValue { - JSC.markBinding(@src()); - - return ReadableStream__empty(globalThis); - } - - pub fn used(globalThis: *JSGlobalObject) JSC.JSValue { - JSC.markBinding(@src()); - - return ReadableStream__used(globalThis); - } - - const Base = @import("../../ast/base.zig"); - pub const StreamTag = enum(usize) { - invalid = 0, - _, - - pub fn init(filedes: bun.FileDescriptor) StreamTag { - var bytes = [8]u8{ 1, 0, 0, 0, 0, 0, 0, 0 }; - const filedes_ = @as([8]u8, @bitCast(@as(usize, @as(u56, @truncate(@as(usize, @intCast(filedes))))))); - bytes[1..8].* = filedes_[0..7].*; - - return @as(StreamTag, @enumFromInt(@as(u64, @bitCast(bytes)))); - } - - pub fn fd(this: StreamTag) bun.FileDescriptor { - var bytes = @as([8]u8, @bitCast(@intFromEnum(this))); - if (bytes[0] != 1) { - return bun.invalid_fd; - } - const out: u64 = 0; - @as([8]u8, @bitCast(out))[0..7].* = bytes[1..8].*; - return @as(bun.FileDescriptor, @intCast(out)); - } - }; -}; - -pub const StreamStart = union(Tag) { +pub const Start = union(Tag) { empty: void, err: Syscall.Error, chunk_size: Blob.SizeType, @@ -473,7 +7,7 @@ pub const StreamStart = union(Tag) { as_uint8array: bool, stream: bool, }, - FileSink: FileSinkOptions, + FileSink: FileSink.Options, HTTPSResponseSink: void, HTTPResponseSink: void, NetworkSink: void, @@ -481,20 +15,6 @@ pub const StreamStart = union(Tag) { owned_and_done: bun.ByteList, done: bun.ByteList, - pub const FileSinkOptions = struct { - chunk_size: Blob.SizeType = 1024, - input_path: PathOrFileDescriptor, - truncate: bool = true, - close: bool = false, - mode: bun.Mode = 0o664, - - pub fn flags(this: *const FileSinkOptions) i32 { - _ = this; - - return bun.O.NONBLOCK | bun.O.CLOEXEC | bun.O.CREAT | bun.O.WRONLY; - } - }; - pub const Tag = enum { empty, err, @@ -509,7 +29,7 @@ pub const StreamStart = union(Tag) { done, }; - pub fn toJS(this: StreamStart, globalThis: *JSGlobalObject) JSC.JSValue { + pub fn toJS(this: Start, globalThis: *JSGlobalObject) JSC.JSValue { switch (this) { .empty, .ready => { return .undefined; @@ -532,7 +52,7 @@ pub const StreamStart = union(Tag) { } } - pub fn fromJS(globalThis: *JSGlobalObject, value: JSValue) bun.JSError!StreamStart { + pub fn fromJS(globalThis: *JSGlobalObject, value: JSValue) bun.JSError!Start { if (value.isEmptyOrUndefinedOrNull() or !value.isObject()) { return .{ .empty = {} }; } @@ -549,7 +69,7 @@ pub const StreamStart = union(Tag) { globalThis: *JSGlobalObject, value: JSValue, comptime tag: Tag, - ) bun.JSError!StreamStart { + ) bun.JSError!Start { if (value.isEmptyOrUndefinedOrNull() or !value.isObject()) { 
return .{ .empty = {} }; } @@ -604,7 +124,7 @@ pub const StreamStart = union(Tag) { if (!path.isString()) { return .{ .err = Syscall.Error{ - .errno = @intFromEnum(bun.C.SystemErrno.EINVAL), + .errno = @intFromEnum(bun.sys.SystemErrno.EINVAL), .syscall = .write, }, }; @@ -622,7 +142,7 @@ pub const StreamStart = union(Tag) { if (!fd_value.isAnyInt()) { return .{ .err = Syscall.Error{ - .errno = @intFromEnum(bun.C.SystemErrno.EBADF), + .errno = @intFromEnum(bun.sys.SystemErrno.EBADF), .syscall = .write, }, }; @@ -637,7 +157,7 @@ pub const StreamStart = union(Tag) { }; } else { return .{ .err = Syscall.Error{ - .errno = @intFromEnum(bun.C.SystemErrno.EBADF), + .errno = @intFromEnum(bun.sys.SystemErrno.EBADF), .syscall = .write, } }; } @@ -674,17 +194,7 @@ pub const StreamStart = union(Tag) { } }; -pub const DrainResult = union(enum) { - owned: struct { - list: std.ArrayList(u8), - size_hint: usize, - }, - estimated_size: usize, - empty: void, - aborted: void, -}; - -pub const StreamResult = union(Tag) { +pub const Result = union(Tag) { pending: *Pending, err: StreamError, done: void, @@ -695,7 +205,7 @@ pub const StreamResult = union(Tag) { into_array: IntoArray, into_array_and_done: IntoArray, - pub fn deinit(this: *StreamResult) void { + pub fn deinit(this: *Result) void { switch (this.*) { .owned => |*owned| owned.deinitWithAllocator(bun.default_allocator), .owned_and_done => |*owned_and_done| owned_and_done.deinitWithAllocator(bun.default_allocator), @@ -748,12 +258,12 @@ pub const StreamResult = union(Tag) { into_array_and_done, }; - pub fn slice16(this: *const StreamResult) []const u16 { + pub fn slice16(this: *const Result) []const u16 { const bytes = this.slice(); return @as([*]const u16, @ptrCast(@alignCast(bytes.ptr)))[0..std.mem.bytesAsSlice(u16, bytes).len]; } - pub fn slice(this: *const StreamResult) []const u8 { + pub fn slice(this: *const Result) []const u8 { return switch (this.*) { .owned => |owned| owned.slice(), .owned_and_done => |owned_and_done| owned_and_done.slice(), @@ -763,7 +273,7 @@ pub const StreamResult = union(Tag) { }; } - pub const Writable = union(StreamResult.Tag) { + pub const Writable = union(Result.Tag) { pending: *Writable.Pending, err: Syscall.Error, @@ -780,7 +290,7 @@ pub const StreamResult = union(Tag) { future: Future = .{ .none = {} }, result: Writable, consumed: Blob.SizeType = 0, - state: StreamResult.Pending.State = .none, + state: Result.Pending.State = .none, pub fn deinit(this: *@This()) void { this.future.deinit(); @@ -826,13 +336,13 @@ pub const StreamResult = union(Tag) { ctx: *anyopaque, handler: Fn, - pub const Fn = *const fn (ctx: *anyopaque, result: StreamResult.Writable) void; + pub const Fn = *const fn (ctx: *anyopaque, result: Result.Writable) void; - pub fn init(this: *Handler, comptime Context: type, ctx: *Context, comptime handler_fn: fn (*Context, StreamResult.Writable) void) void { + pub fn init(this: *Handler, comptime Context: type, ctx: *Context, comptime handler_fn: fn (*Context, Result.Writable) void) void { this.ctx = ctx; this.handler = struct { const handler = handler_fn; - pub fn onHandle(ctx_: *anyopaque, result: StreamResult.Writable) void { + pub fn onHandle(ctx_: *anyopaque, result: Result.Writable) void { @call(bun.callmod_inline, handler, .{ bun.cast(*Context, ctx_), result }); } }.onHandle; @@ -909,10 +419,10 @@ pub const StreamResult = union(Tag) { pub const Pending = struct { future: Future = undefined, - result: StreamResult = .{ .done = {} }, + result: Result = .{ .done = {} }, state: State = .none, - pub fn 
set(this: *Pending, comptime Context: type, ctx: *Context, comptime handler_fn: fn (*Context, StreamResult) void) void { + pub fn set(this: *Pending, comptime Context: type, ctx: *Context, comptime handler_fn: fn (*Context, Result) void) void { this.future.init(Context, ctx, handler_fn); this.state = .pending; } @@ -936,7 +446,7 @@ pub const StreamResult = union(Tag) { }, handler: Handler, - pub fn init(this: *Future, comptime Context: type, ctx: *Context, comptime handler_fn: fn (*Context, StreamResult) void) void { + pub fn init(this: *Future, comptime Context: type, ctx: *Context, comptime handler_fn: fn (*Context, Result) void) void { this.* = .{ .handler = undefined, }; @@ -948,13 +458,13 @@ pub const StreamResult = union(Tag) { ctx: *anyopaque, handler: Fn, - pub const Fn = *const fn (ctx: *anyopaque, result: StreamResult) void; + pub const Fn = *const fn (ctx: *anyopaque, result: Result) void; - pub fn init(this: *Handler, comptime Context: type, ctx: *Context, comptime handler_fn: fn (*Context, StreamResult) void) void { + pub fn init(this: *Handler, comptime Context: type, ctx: *Context, comptime handler_fn: fn (*Context, Result) void) void { this.ctx = ctx; this.handler = struct { const handler = handler_fn; - pub fn onHandle(ctx_: *anyopaque, result: StreamResult) void { + pub fn onHandle(ctx_: *anyopaque, result: Result) void { @call(bun.callmod_inline, handler, .{ bun.cast(*Context, ctx_), result }); } }.onHandle; @@ -972,7 +482,7 @@ pub const StreamResult = union(Tag) { this.state = .used; switch (this.future) { .promise => |p| { - StreamResult.fulfillPromise(&this.result, p.promise, p.globalThis); + Result.fulfillPromise(&this.result, p.promise, p.globalThis); }, .handler => |h| { h.handler(h.ctx, this.result); @@ -981,14 +491,14 @@ pub const StreamResult = union(Tag) { } }; - pub fn isDone(this: *const StreamResult) bool { + pub fn isDone(this: *const Result) bool { return switch (this.*) { .owned_and_done, .temporary_and_done, .into_array_and_done, .done, .err => true, else => false, }; } - pub fn fulfillPromise(result: *StreamResult, promise: *JSC.JSPromise, globalThis: *JSC.JSGlobalObject) void { + pub fn fulfillPromise(result: *Result, promise: *JSC.JSPromise, globalThis: *JSC.JSGlobalObject) void { const vm = globalThis.bunVM(); const loop = vm.eventLoop(); const promise_value = promise.asValue(globalThis); @@ -1023,7 +533,7 @@ pub const StreamResult = union(Tag) { } } - pub fn toJS(this: *const StreamResult, globalThis: *JSGlobalObject) JSValue { + pub fn toJS(this: *const Result, globalThis: *JSGlobalObject) JSValue { if (JSC.VirtualMachine.get().isShuttingDown()) { var that = this.*; that.deinit(); @@ -1166,865 +676,12 @@ pub const Signal = struct { }; }; -pub const Sink = struct { - ptr: *anyopaque, - vtable: VTable, - status: Status = Status.closed, - used: bool = false, - - pub const pending = Sink{ - .ptr = @as(*anyopaque, @ptrFromInt(0xaaaaaaaa)), - .vtable = undefined, - }; - - pub const Status = enum { - ready, - closed, - }; - - pub const Data = union(enum) { - utf16: StreamResult, - latin1: StreamResult, - bytes: StreamResult, - }; - - pub fn initWithType(comptime Type: type, handler: *Type) Sink { - return .{ - .ptr = handler, - .vtable = VTable.wrap(Type), - .status = .ready, - .used = false, - }; - } - - pub fn init(handler: anytype) Sink { - return initWithType(std.meta.Child(@TypeOf(handler)), handler); - } - - pub const UTF8Fallback = struct { - const stack_size = 1024; - pub fn writeLatin1(comptime Ctx: type, ctx: *Ctx, input: StreamResult, comptime 
writeFn: anytype) StreamResult.Writable { - const str = input.slice(); - if (strings.isAllASCII(str)) { - return writeFn( - ctx, - input, - ); - } - - if (stack_size >= str.len) { - var buf: [stack_size]u8 = undefined; - @memcpy(buf[0..str.len], str); - - strings.replaceLatin1WithUTF8(buf[0..str.len]); - if (input.isDone()) { - const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..str.len]) }); - return result; - } else { - const result = writeFn(ctx, .{ .temporary = bun.ByteList.init(buf[0..str.len]) }); - return result; - } - } - - { - var slice = bun.default_allocator.alloc(u8, str.len) catch return .{ .err = Syscall.Error.oom }; - @memcpy(slice[0..str.len], str); - - strings.replaceLatin1WithUTF8(slice[0..str.len]); - if (input.isDone()) { - return writeFn(ctx, .{ .owned_and_done = bun.ByteList.init(slice) }); - } else { - return writeFn(ctx, .{ .owned = bun.ByteList.init(slice) }); - } - } - } - - pub fn writeUTF16(comptime Ctx: type, ctx: *Ctx, input: StreamResult, comptime writeFn: anytype) StreamResult.Writable { - const str: []const u16 = std.mem.bytesAsSlice(u16, input.slice()); - - if (stack_size >= str.len * 2) { - var buf: [stack_size]u8 = undefined; - const copied = strings.copyUTF16IntoUTF8(&buf, []const u16, str, true); - bun.assert(copied.written <= stack_size); - bun.assert(copied.read <= stack_size); - if (input.isDone()) { - const result = writeFn(ctx, .{ .temporary_and_done = bun.ByteList.init(buf[0..copied.written]) }); - return result; - } else { - const result = writeFn(ctx, .{ .temporary = bun.ByteList.init(buf[0..copied.written]) }); - return result; - } - } - - { - const allocated = strings.toUTF8Alloc(bun.default_allocator, str) catch return .{ .err = Syscall.Error.oom }; - if (input.isDone()) { - return writeFn(ctx, .{ .owned_and_done = bun.ByteList.init(allocated) }); - } else { - return writeFn(ctx, .{ .owned = bun.ByteList.init(allocated) }); - } - } - } - }; - - pub const VTable = struct { - pub const WriteUTF16Fn = *const (fn (this: *anyopaque, data: StreamResult) StreamResult.Writable); - pub const WriteUTF8Fn = *const (fn (this: *anyopaque, data: StreamResult) StreamResult.Writable); - pub const WriteLatin1Fn = *const (fn (this: *anyopaque, data: StreamResult) StreamResult.Writable); - pub const EndFn = *const (fn (this: *anyopaque, err: ?Syscall.Error) JSC.Maybe(void)); - pub const ConnectFn = *const (fn (this: *anyopaque, signal: Signal) JSC.Maybe(void)); - - connect: ConnectFn, - write: WriteUTF8Fn, - writeLatin1: WriteLatin1Fn, - writeUTF16: WriteUTF16Fn, - end: EndFn, - - pub fn wrap( - comptime Wrapped: type, - ) VTable { - const Functions = struct { - pub fn onWrite(this: *anyopaque, data: StreamResult) StreamResult.Writable { - return Wrapped.write(@as(*Wrapped, @ptrCast(@alignCast(this))), data); - } - pub fn onConnect(this: *anyopaque, signal: Signal) JSC.Maybe(void) { - return Wrapped.connect(@as(*Wrapped, @ptrCast(@alignCast(this))), signal); - } - pub fn onWriteLatin1(this: *anyopaque, data: StreamResult) StreamResult.Writable { - return Wrapped.writeLatin1(@as(*Wrapped, @ptrCast(@alignCast(this))), data); - } - pub fn onWriteUTF16(this: *anyopaque, data: StreamResult) StreamResult.Writable { - return Wrapped.writeUTF16(@as(*Wrapped, @ptrCast(@alignCast(this))), data); - } - pub fn onEnd(this: *anyopaque, err: ?Syscall.Error) JSC.Maybe(void) { - return Wrapped.end(@as(*Wrapped, @ptrCast(@alignCast(this))), err); - } - }; - - return VTable{ - .write = Functions.onWrite, - .writeLatin1 = Functions.onWriteLatin1, - 
.writeUTF16 = Functions.onWriteUTF16, - .end = Functions.onEnd, - .connect = Functions.onConnect, - }; - } - }; - - pub fn end(this: *Sink, err: ?Syscall.Error) JSC.Maybe(void) { - if (this.status == .closed) { - return .{ .result = {} }; - } - - this.status = .closed; - return this.vtable.end(this.ptr, err); - } - - pub fn writeLatin1(this: *Sink, data: StreamResult) StreamResult.Writable { - if (this.status == .closed) { - return .{ .done = {} }; - } - - const res = this.vtable.writeLatin1(this.ptr, data); - this.status = if ((res.isDone()) or this.status == .closed) - Status.closed - else - Status.ready; - this.used = true; - return res; - } - - pub fn writeBytes(this: *Sink, data: StreamResult) StreamResult.Writable { - if (this.status == .closed) { - return .{ .done = {} }; - } - - const res = this.vtable.write(this.ptr, data); - this.status = if ((res.isDone()) or this.status == .closed) - Status.closed - else - Status.ready; - this.used = true; - return res; - } - - pub fn writeUTF16(this: *Sink, data: StreamResult) StreamResult.Writable { - if (this.status == .closed) { - return .{ .done = {} }; - } - - const res = this.vtable.writeUTF16(this.ptr, data); - this.status = if ((res.isDone()) or this.status == .closed) - Status.closed - else - Status.ready; - this.used = true; - return res; - } - - pub fn write(this: *Sink, data: Data) StreamResult.Writable { - switch (data) { - .utf16 => |str| { - return this.writeUTF16(str); - }, - .latin1 => |str| { - return this.writeLatin1(str); - }, - .bytes => |bytes| { - return this.writeBytes(bytes); - }, - } - } -}; - -pub const ArrayBufferSink = struct { - bytes: bun.ByteList, - allocator: std.mem.Allocator, - done: bool = false, - signal: Signal = .{}, - next: ?Sink = null, - streaming: bool = false, - as_uint8array: bool = false, - - pub fn connect(this: *ArrayBufferSink, signal: Signal) void { - bun.assert(this.reader == null); - this.signal = signal; - } - - pub fn start(this: *ArrayBufferSink, stream_start: StreamStart) JSC.Maybe(void) { - this.bytes.len = 0; - var list = this.bytes.listManaged(this.allocator); - list.clearRetainingCapacity(); - - switch (stream_start) { - .ArrayBufferSink => |config| { - if (config.chunk_size > 0) { - list.ensureTotalCapacityPrecise(config.chunk_size) catch return .{ .err = Syscall.Error.oom }; - this.bytes.update(list); - } - - this.as_uint8array = config.as_uint8array; - this.streaming = config.stream; - }, - else => {}, - } - - this.done = false; - - this.signal.start(); - return .{ .result = {} }; - } - - pub fn flush(_: *ArrayBufferSink) JSC.Maybe(void) { - return .{ .result = {} }; - } - - pub fn flushFromJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject, wait: bool) JSC.Maybe(JSValue) { - if (this.streaming) { - const value: JSValue = switch (this.as_uint8array) { - true => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .Uint8Array), - false => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .ArrayBuffer), - }; - this.bytes.len = 0; - if (wait) {} - return .{ .result = value }; - } - - return .{ .result = JSValue.jsNumber(0) }; - } - - pub fn finalize(this: *ArrayBufferSink) void { - if (this.bytes.len > 0) { - this.bytes.listManaged(this.allocator).deinit(); - this.bytes = bun.ByteList.init(""); - this.done = true; - } - - this.allocator.destroy(this); - } - - pub fn init(allocator: std.mem.Allocator, next: ?Sink) !*ArrayBufferSink { - const this = try allocator.create(ArrayBufferSink); - this.* = ArrayBufferSink{ - .bytes = bun.ByteList.init(&.{}), - .allocator = allocator, 
- .next = next, - }; - return this; - } - - pub fn construct( - this: *ArrayBufferSink, - allocator: std.mem.Allocator, - ) void { - this.* = ArrayBufferSink{ - .bytes = bun.ByteList{}, - .allocator = allocator, - .next = null, - }; - } - - pub fn write(this: *@This(), data: StreamResult) StreamResult.Writable { - if (this.next) |*next| { - return next.writeBytes(data); - } - - const len = this.bytes.write(this.allocator, data.slice()) catch { - return .{ .err = Syscall.Error.oom }; - }; - this.signal.ready(null, null); - return .{ .owned = len }; - } - pub const writeBytes = write; - pub fn writeLatin1(this: *@This(), data: StreamResult) StreamResult.Writable { - if (this.next) |*next| { - return next.writeLatin1(data); - } - const len = this.bytes.writeLatin1(this.allocator, data.slice()) catch { - return .{ .err = Syscall.Error.oom }; - }; - this.signal.ready(null, null); - return .{ .owned = len }; - } - pub fn writeUTF16(this: *@This(), data: StreamResult) StreamResult.Writable { - if (this.next) |*next| { - return next.writeUTF16(data); - } - const len = this.bytes.writeUTF16(this.allocator, @as([*]const u16, @ptrCast(@alignCast(data.slice().ptr)))[0..std.mem.bytesAsSlice(u16, data.slice()).len]) catch { - return .{ .err = Syscall.Error.oom }; - }; - this.signal.ready(null, null); - return .{ .owned = len }; - } - - pub fn end(this: *ArrayBufferSink, err: ?Syscall.Error) JSC.Maybe(void) { - if (this.next) |*next| { - return next.end(err); - } - this.signal.close(err); - return .{ .result = {} }; - } - pub fn destroy(this: *ArrayBufferSink) void { - this.bytes.deinitWithAllocator(this.allocator); - this.allocator.destroy(this); - } - pub fn toJS(this: *ArrayBufferSink, globalThis: *JSGlobalObject, as_uint8array: bool) JSValue { - if (this.streaming) { - const value: JSValue = switch (as_uint8array) { - true => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .Uint8Array), - false => JSC.ArrayBuffer.create(globalThis, this.bytes.slice(), .ArrayBuffer), - }; - this.bytes.len = 0; - return value; - } - - var list = this.bytes.listManaged(this.allocator); - this.bytes = bun.ByteList.init(""); - return ArrayBuffer.fromBytes( - try list.toOwnedSlice(), - if (as_uint8array) - .Uint8Array - else - .ArrayBuffer, - ).toJS(globalThis, null); - } - - pub fn endFromJS(this: *ArrayBufferSink, _: *JSGlobalObject) JSC.Maybe(ArrayBuffer) { - if (this.done) { - return .{ .result = ArrayBuffer.fromBytes(&[_]u8{}, .ArrayBuffer) }; - } - - bun.assert(this.next == null); - var list = this.bytes.listManaged(this.allocator); - this.bytes = bun.ByteList.init(""); - this.done = true; - this.signal.close(null); - return .{ .result = ArrayBuffer.fromBytes( - list.toOwnedSlice() catch bun.outOfMemory(), - if (this.as_uint8array) - .Uint8Array - else - .ArrayBuffer, - ) }; - } - - pub fn sink(this: *ArrayBufferSink) Sink { - return Sink.init(this); - } - - pub fn memoryCost(this: *const ArrayBufferSink) usize { - // Since this is a JSSink, the NewJSSink function does @sizeOf(JSSink) which includes @sizeOf(ArrayBufferSink). 
- return this.bytes.cap; - } - - pub const JSSink = NewJSSink(@This(), "ArrayBufferSink"); -}; - -pub const SinkDestructor = struct { - const Detached = opaque {}; - const Subprocess = JSC.API.Bun.Subprocess; - pub const Ptr = bun.TaggedPointerUnion(.{ - Detached, - Subprocess, - }); - - pub export fn Bun__onSinkDestroyed( - ptr_value: ?*anyopaque, - sink_ptr: ?*anyopaque, - ) callconv(.C) void { - _ = sink_ptr; // autofix - const ptr = Ptr.from(ptr_value); - - if (ptr.isNull()) { - return; - } - - switch (ptr.tag()) { - @field(Ptr.Tag, @typeName(Detached)) => { - return; - }, - @field(Ptr.Tag, @typeName(Subprocess)) => { - const subprocess = ptr.as(Subprocess); - subprocess.onStdinDestroyed(); - }, - else => { - Output.debugWarn("Unknown sink type", .{}); - }, - } - } -}; - -pub fn NewJSSink(comptime SinkType: type, comptime abi_name: []const u8) type { - return struct { - sink: SinkType, - - const ThisSink = @This(); - - // This attaches it to JS - pub const SinkSignal = extern struct { - cpp: JSValue, - - pub fn init(cpp: JSValue) Signal { - // this one can be null - @setRuntimeSafety(false); - return Signal.initWithType(SinkSignal, @as(*SinkSignal, @ptrFromInt(@as(usize, @bitCast(@intFromEnum(cpp)))))); - } - - pub fn close(this: *@This(), _: ?Syscall.Error) void { - onClose(@as(SinkSignal, @bitCast(@intFromPtr(this))).cpp, JSValue.jsUndefined()); - } - - pub fn ready(this: *@This(), _: ?Blob.SizeType, _: ?Blob.SizeType) void { - onReady(@as(SinkSignal, @bitCast(@intFromPtr(this))).cpp, JSValue.jsUndefined(), JSValue.jsUndefined()); - } - - pub fn start(_: *@This()) void {} - }; - - pub fn memoryCost(this: *ThisSink) callconv(.C) usize { - return @sizeOf(ThisSink) + SinkType.memoryCost(&this.sink); - } - - const AssignToStreamFn = *const fn (*JSGlobalObject, JSValue, *anyopaque, **anyopaque) callconv(.C) JSValue; - const OnCloseFn = *const fn (JSValue, JSValue) callconv(.C) void; - const OnReadyFn = *const fn (JSValue, JSValue, JSValue) callconv(.C) void; - const OnStartFn = *const fn (JSValue, *JSGlobalObject) callconv(.C) void; - const CreateObjectFn = *const fn (*JSGlobalObject, *anyopaque, usize) callconv(.C) JSValue; - const SetDestroyCallbackFn = *const fn (JSValue, usize) callconv(.C) void; - const DetachPtrFn = *const fn (JSValue) callconv(.C) void; - - const assignToStreamExtern = @extern(AssignToStreamFn, .{ .name = abi_name ++ "__assignToStream" }); - const onCloseExtern = @extern(OnCloseFn, .{ .name = abi_name ++ "__onClose" }); - const onReadyExtern = @extern(OnReadyFn, .{ .name = abi_name ++ "__onReady" }); - const onStartExtern = @extern(OnStartFn, .{ .name = abi_name ++ "__onStart" }); - const createObjectExtern = @extern(CreateObjectFn, .{ .name = abi_name ++ "__createObject" }); - const setDestroyCallbackExtern = @extern(SetDestroyCallbackFn, .{ .name = abi_name ++ "__setDestroyCallback" }); - const detachPtrExtern = @extern(DetachPtrFn, .{ .name = abi_name ++ "__detachPtr" }); - - pub fn assignToStream(globalThis: *JSGlobalObject, stream: JSValue, ptr: *anyopaque, jsvalue_ptr: **anyopaque) JSValue { - return assignToStreamExtern(globalThis, stream, ptr, jsvalue_ptr); - } - - pub fn onClose(ptr: JSValue, reason: JSValue) void { - JSC.markBinding(@src()); - return onCloseExtern(ptr, reason); - } - - pub fn onReady(ptr: JSValue, amount: JSValue, offset: JSValue) void { - JSC.markBinding(@src()); - return onReadyExtern(ptr, amount, offset); - } - - pub fn onStart(ptr: JSValue, globalThis: *JSGlobalObject) void { - JSC.markBinding(@src()); - return onStartExtern(ptr, 
globalThis); - } - - pub fn createObject(globalThis: *JSGlobalObject, object: *anyopaque, destructor: usize) JSValue { - JSC.markBinding(@src()); - return createObjectExtern(globalThis, object, destructor); - } - - pub fn setDestroyCallback(value: JSValue, callback: usize) void { - JSC.markBinding(@src()); - return setDestroyCallbackExtern(value, callback); - } - - pub fn detachPtr(ptr: JSValue) void { - return detachPtrExtern(ptr); - } - - pub fn construct(globalThis: *JSGlobalObject, _: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - - if (comptime !@hasDecl(SinkType, "construct")) { - const Static = struct { - pub const message = std.fmt.comptimePrint("{s} is not constructable", .{SinkType.name}); - }; - const err = JSC.SystemError{ - .message = bun.String.static(Static.message), - .code = bun.String.static(@tagName(.ERR_ILLEGAL_CONSTRUCTOR)), - }; - return globalThis.throwValue(err.toErrorInstance(globalThis)); - } - - var allocator = globalThis.bunVM().allocator; - var this = allocator.create(ThisSink) catch { - return globalThis.throwValue(Syscall.Error.oom.toJSC(globalThis)); - }; - this.sink.construct(allocator); - return createObject(globalThis, this, 0); - } - - pub fn finalize(ptr: *anyopaque) callconv(.C) void { - var this = @as(*ThisSink, @ptrCast(@alignCast(ptr))); - - this.sink.finalize(); - } - - pub fn detach(this: *ThisSink) void { - if (comptime !@hasField(SinkType, "signal")) - return; - - const ptr = this.sink.signal.ptr; - if (this.sink.signal.isDead()) - return; - this.sink.signal.clear(); - const value = @as(JSValue, @enumFromInt(@as(JSC.JSValueReprInt, @bitCast(@intFromPtr(ptr))))); - value.unprotect(); - detachPtr(value); - } - - // The code generator encodes two distinct failure types using 0 and 1 - const FromJSResult = enum(usize) { - /// The sink has been closed and the wrapped type is freed. - detached = 0, - /// JS exception has not yet been thrown - cast_failed = 1, - /// *ThisSink - _, - }; - const fromJSExtern = @extern( - *const fn (value: JSValue) callconv(.C) FromJSResult, - .{ .name = abi_name ++ "__fromJS" }, - ); - - pub fn fromJS(value: JSValue) ?*ThisSink { - switch (fromJSExtern(value)) { - .detached, .cast_failed => return null, - else => |ptr| return @ptrFromInt(@intFromEnum(ptr)), - } - } - - fn getThis(global: *JSGlobalObject, callframe: *const JSC.CallFrame) bun.JSError!*ThisSink { - return switch (fromJSExtern(callframe.this())) { - .detached => global.throw("This " ++ abi_name ++ " has already been closed. 
A \"direct\" ReadableStream terminates its underlying socket once `async pull()` returns.", .{}), - .cast_failed => global.ERR(.INVALID_THIS, "Expected " ++ abi_name, .{}).throw(), - else => |ptr| @ptrFromInt(@intFromEnum(ptr)), - }; - } - - pub fn unprotect(this: *@This()) void { - _ = this; - } - - pub fn write(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - const this = try getThis(globalThis, callframe); - - if (comptime @hasDecl(SinkType, "getPendingError")) { - if (this.sink.getPendingError()) |err| { - return globalThis.throwValue(err); - } - } - - const args_list = callframe.arguments_old(4); - const args = args_list.ptr[0..args_list.len]; - - if (args.len == 0) { - return globalThis.throwValue(JSC.toTypeError(.MISSING_ARGS, "write() expects a string, ArrayBufferView, or ArrayBuffer", .{}, globalThis)); - } - - const arg = args[0]; - arg.ensureStillAlive(); - defer arg.ensureStillAlive(); - - if (arg.isEmptyOrUndefinedOrNull()) { - return globalThis.throwValue(JSC.toTypeError(.STREAM_NULL_VALUES, "write() expects a string, ArrayBufferView, or ArrayBuffer", .{}, globalThis)); - } - - if (arg.asArrayBuffer(globalThis)) |buffer| { - const slice = buffer.slice(); - if (slice.len == 0) { - return JSC.JSValue.jsNumber(0); - } - - return this.sink.writeBytes(.{ .temporary = bun.ByteList.init(slice) }).toJS(globalThis); - } - - if (!arg.isString()) { - return globalThis.throwValue(JSC.toTypeError(.INVALID_ARG_TYPE, "write() expects a string, ArrayBufferView, or ArrayBuffer", .{}, globalThis)); - } - - const str = arg.toString(globalThis); - if (globalThis.hasException()) { - return .zero; - } - - const view = str.view(globalThis); - - if (view.isEmpty()) { - return JSC.JSValue.jsNumber(0); - } - - defer str.ensureStillAlive(); - if (view.is16Bit()) { - return this.sink.writeUTF16(.{ .temporary = bun.ByteList.initConst(std.mem.sliceAsBytes(view.utf16SliceAligned())) }).toJS(globalThis); - } - - return this.sink.writeLatin1(.{ .temporary = bun.ByteList.initConst(view.slice()) }).toJS(globalThis); - } - - pub fn writeUTF8(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - - const this = try getThis(globalThis, callframe); - - if (comptime @hasDecl(SinkType, "getPendingError")) { - if (this.sink.getPendingError()) |err| { - return globalThis.throwValue(err); - } - } - - const args_list = callframe.arguments_old(4); - const args = args_list.ptr[0..args_list.len]; - if (args.len == 0 or !args[0].isString()) { - const err = JSC.toTypeError( - if (args.len == 0) .MISSING_ARGS else .INVALID_ARG_TYPE, - "writeUTF8() expects a string", - .{}, - globalThis, - ); - return globalThis.throwValue(err); - } - - const arg = args[0]; - - const str = arg.toString(globalThis); - if (globalThis.hasException()) { - return .zero; - } - - const view = str.view(globalThis); - if (view.isEmpty()) { - return JSC.JSValue.jsNumber(0); - } - - defer str.ensureStillAlive(); - if (str.is16Bit()) { - return this.sink.writeUTF16(.{ .temporary = view.utf16SliceAligned() }).toJS(globalThis); - } - - return this.sink.writeLatin1(.{ .temporary = view.slice() }).toJS(globalThis); - } - - pub fn close(globalThis: *JSGlobalObject, sink_ptr: ?*anyopaque) callconv(.C) JSValue { - JSC.markBinding(@src()); - const this: *ThisSink = @ptrCast(@alignCast(sink_ptr orelse return .undefined)); - - if (comptime @hasDecl(SinkType, "getPendingError")) { - if (this.sink.getPendingError()) |err| { - return 
globalThis.vm().throwError(globalThis, err) catch .zero; - } - } - - return this.sink.end(null).toJS(globalThis); - } - - pub fn flush(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - - const this = try getThis(globalThis, callframe); - - if (comptime @hasDecl(SinkType, "getPendingError")) { - if (this.sink.getPendingError()) |err| { - return globalThis.throwValue(err); - } - } - - defer { - if ((comptime @hasField(SinkType, "done")) and this.sink.done) { - this.unprotect(); - } - } - - if (comptime @hasDecl(SinkType, "flushFromJS")) { - const wait = callframe.argumentsCount() > 0 and callframe.argument(0).isBoolean() and callframe.argument(0).asBoolean(); - const maybe_value: JSC.Maybe(JSValue) = this.sink.flushFromJS(globalThis, wait); - return switch (maybe_value) { - .result => |value| value, - .err => |err| return globalThis.throwValue(err.toJSC(globalThis)), - }; - } - - return this.sink.flush().toJS(globalThis); - } - - pub fn start(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - - const this = try getThis(globalThis, callframe); - - if (comptime @hasDecl(SinkType, "getPendingError")) { - if (this.sink.getPendingError()) |err| { - return globalThis.throwValue(err); - } - } - - if (comptime @hasField(StreamStart, abi_name)) { - return this.sink.start( - if (callframe.argumentsCount() > 0) - try StreamStart.fromJSWithTag(globalThis, callframe.argument(0), comptime @field(StreamStart, abi_name)) - else - StreamStart{ .empty = {} }, - ).toJS(globalThis); - } - - return this.sink.start( - if (callframe.argumentsCount() > 0) - try StreamStart.fromJS(globalThis, callframe.argument(0)) - else - StreamStart{ .empty = {} }, - ).toJS(globalThis); - } - - pub fn end(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - - const this = try getThis(globalThis, callframe); - - if (comptime @hasDecl(SinkType, "getPendingError")) { - if (this.sink.getPendingError()) |err| { - return globalThis.throwValue(err); - } - } - - defer { - if (comptime @hasField(SinkType, "done")) { - if (this.sink.done) { - callframe.this().unprotect(); - } - } - } - - return this.sink.endFromJS(globalThis).toJS(globalThis); - } - - pub fn endWithSink(ptr: *anyopaque, globalThis: *JSGlobalObject) callconv(JSC.conv) JSValue { - JSC.markBinding(@src()); - - var this = @as(*ThisSink, @ptrCast(@alignCast(ptr))); - - if (comptime @hasDecl(SinkType, "getPendingError")) { - if (this.sink.getPendingError()) |err| { - return globalThis.throwValue(err); - } - } - - return this.sink.endFromJS(globalThis).toJS(globalThis); - } - - pub fn updateRef(ptr: *anyopaque, value: bool) callconv(.C) void { - JSC.markBinding(@src()); - var this = bun.cast(*ThisSink, ptr); - if (comptime @hasDecl(SinkType, "updateRef")) - this.sink.updateRef(value); - } - - const jsWrite = JSC.toJSHostFunction(write); - const jsFlush = JSC.toJSHostFunction(flush); - const jsStart = JSC.toJSHostFunction(start); - const jsEnd = JSC.toJSHostFunction(end); - const jsConstruct = JSC.toJSHostFunction(construct); - - fn jsGetInternalFd(ptr: *anyopaque) callconv(.C) JSValue { - var this = bun.cast(*ThisSink, ptr); - if (comptime @hasDecl(SinkType, "getFd")) { - return JSValue.jsNumber(this.sink.getFd()); - } - return .null; - } - - comptime { - @export(&finalize, .{ .name = abi_name ++ "__finalize" }); - @export(&jsWrite, .{ .name = abi_name ++ "__write" }); - @export(&jsGetInternalFd, .{ 
.name = abi_name ++ "__getInternalFd" });
-            @export(&close, .{ .name = abi_name ++ "__close" });
-            @export(&jsFlush, .{ .name = abi_name ++ "__flush" });
-            @export(&jsStart, .{ .name = abi_name ++ "__start" });
-            @export(&jsEnd, .{ .name = abi_name ++ "__end" });
-            @export(&jsConstruct, .{ .name = abi_name ++ "__construct" });
-            @export(&endWithSink, .{ .name = abi_name ++ "__endWithSink" });
-            @export(&updateRef, .{ .name = abi_name ++ "__updateRef" });
-            @export(&memoryCost, .{ .name = abi_name ++ "__memoryCost" });
-        }
-    };
-}
-
-// pub fn NetworkSocket(comptime tls: bool) type {
-//     return struct {
-//         const Socket = uws.NewSocketHandler(tls);
-//         const ThisSocket = @This();
-
-//         socket: Socket,
-
-//         pub fn connect(globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue {
-//             JSC.markBinding(@src());
-
-//             var this = @ptrCast(*ThisSocket, @alignCast( fromJS(globalThis, callframe.this()) orelse {
-//                 const err = JSC.toTypeError(.INVALID_THIS, "Expected Socket", .{}, globalThis);
-//                 globalThis.throwValue( err);
-//                 return .zero;
-//             }));
-//         }
-//     };
-// }
-
-// TODO: make this JSGlobalObject local
-// for better security
-pub const ByteListPool = ObjectPool(
-    bun.ByteList,
-    null,
-    true,
-    8,
-);
-
 pub fn HTTPServerWritable(comptime ssl: bool) type {
     return struct {
         const UWSResponse = uws.NewApp(ssl).Response;

         res: *UWSResponse,
         buffer: bun.ByteList,
-        pooled_buffer: ?*ByteListPool.Node = null,
+        pooled_buffer: ?*WebCore.ByteListPool.Node = null,
         offset: Blob.SizeType = 0,
         is_listening_for_abort: bool = false,
@@ -2047,7 +704,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
         onFirstWrite: ?*const fn (?*anyopaque) void = null,
         ctx: ?*anyopaque = null,
-        auto_flusher: AutoFlusher = AutoFlusher{},
+        auto_flusher: WebCore.AutoFlusher = .{},

         const log = Output.scoped(.HTTPServerWritable, false);

@@ -2194,7 +851,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
             return true;
         }

-        pub fn start(this: *@This(), stream_start: StreamStart) JSC.Maybe(void) {
+        pub fn start(this: *@This(), stream_start: Start) JSC.Maybe(void) {
             if (this.aborted or this.res.hasResponded()) {
                 this.markDone();
                 this.signal.close(null);
@@ -2208,7 +865,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
             if (this.buffer.cap == 0) {
                 bun.assert(this.pooled_buffer == null);
                 if (comptime FeatureFlags.http_buffer_pooling) {
-                    if (ByteListPool.getIfExists()) |pooled_node| {
+                    if (WebCore.ByteListPool.getIfExists()) |pooled_node| {
                         this.pooled_buffer = pooled_node;
                         this.buffer = this.pooled_buffer.?.data;
                     }
@@ -2313,7 +970,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
             return .{ .result = {} };
         }

-        pub fn write(this: *@This(), data: StreamResult) StreamResult.Writable {
+        pub fn write(this: *@This(), data: Result) Result.Writable {
             if (this.done or this.requested_end) {
                 return .{ .owned = 0 };
             }
@@ -2355,7 +1012,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
             return .{ .owned = len };
         }
         pub const writeBytes = write;
-        pub fn writeLatin1(this: *@This(), data: StreamResult) StreamResult.Writable {
+        pub fn writeLatin1(this: *@This(), data: Result) Result.Writable {
             if (this.done or this.requested_end) {
                 return .{ .owned = 0 };
             }
@@ -2413,7 +1070,7 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
             return .{ .owned = len };
         }

-        pub fn writeUTF16(this: *@This(), data: StreamResult) StreamResult.Writable {
+        pub fn writeUTF16(this: *@This(), data: Result) Result.Writable {
             if (this.done or this.requested_end) {
                 return .{ .owned = 0 };
             }
@@ -2613,10 +1270,12 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
                     this.buffer = bun.ByteList.init("");
                     this.pooled_buffer = null;
                     pooled.release();
-                } else if (this.buffer.cap == 0) {} else if (FeatureFlags.http_buffer_pooling and !ByteListPool.full()) {
+                } else if (this.buffer.cap == 0) {
+                    //
+                } else if (FeatureFlags.http_buffer_pooling and !WebCore.ByteListPool.full()) {
                     const buffer = this.buffer;
                     this.buffer = bun.ByteList.init("");
-                    ByteListPool.push(this.allocator, buffer);
+                    WebCore.ByteListPool.push(this.allocator, buffer);
                 } else {
                     // Don't release this buffer until destroy() is called
                     this.buffer.len = 0;
@@ -2635,8 +1294,8 @@ pub fn HTTPServerWritable(comptime ssl: bool) type {
             }
         }

-        const name = if (ssl) "HTTPSResponseSink" else "HTTPResponseSink";
-        pub const JSSink = NewJSSink(@This(), name);
+        pub const name = if (ssl) "HTTPSResponseSink" else "HTTPResponseSink";
+        pub const JSSink = Sink.JSSink(@This(), name);
     };
 }
 pub const HTTPSResponseSink = HTTPServerWritable(true);
@@ -2707,7 +1366,7 @@ pub const NetworkSink = struct {
         return true;
     }

-    pub fn start(this: *@This(), stream_start: StreamStart) JSC.Maybe(void) {
+    pub fn start(this: *@This(), stream_start: Start) JSC.Maybe(void) {
         if (this.ended) {
             return .{ .result = {} };
         }
@@ -2827,7 +1486,7 @@ pub const NetworkSink = struct {
         this.finalize();
     }

-    pub fn write(this: *@This(), data: StreamResult) StreamResult.Writable {
+    pub fn write(this: *@This(), data: Result) Result.Writable {
         if (this.ended) {
             return .{ .owned = 0 };
         }
@@ -2860,7 +1519,7 @@ pub const NetworkSink = struct {
     }
     pub const writeBytes = write;

-    pub fn writeLatin1(this: *@This(), data: StreamResult) StreamResult.Writable {
+    pub fn writeLatin1(this: *@This(), data: Result) Result.Writable {
         if (this.ended) {
             return .{ .owned = 0 };
         }
@@ -2911,7 +1570,7 @@ pub const NetworkSink = struct {
         return .{ .owned = len };
     }

-    pub fn writeUTF16(this: *@This(), data: StreamResult) StreamResult.Writable {
+    pub fn writeUTF16(this: *@This(), data: Result) Result.Writable {
         if (this.ended) {
             return .{ .owned = 0 };
         }
@@ -2972,2421 +1631,54 @@ pub const NetworkSink = struct {
         return this.buffer.memoryCost();
     }

-    const name = "NetworkSink";
-    pub const JSSink = NewJSSink(@This(), name);
-};
-pub const BufferedReadableStreamAction = enum {
-    text,
-    arrayBuffer,
-    blob,
-    bytes,
-    json,
+    pub const name = "NetworkSink";
+    pub const JSSink = Sink.JSSink(@This(), name);
 };

-pub fn ReadableStreamSource(
-    comptime Context: type,
-    comptime name_: []const u8,
-    comptime onStart: anytype,
-    comptime onPull: anytype,
-    comptime onCancel: fn (this: *Context) void,
-    comptime deinit_fn: fn (this: *Context) void,
-    comptime setRefUnrefFn: ?fn (this: *Context, enable: bool) void,
-    comptime drainInternalBuffer: ?fn (this: *Context) bun.ByteList,
-    comptime memoryCostFn: ?fn (this: *const Context) usize,
-    comptime toBufferedValue: ?fn (this: *Context, globalThis: *JSC.JSGlobalObject, action: BufferedReadableStreamAction) bun.JSError!JSC.JSValue,
-) type {
-    return struct {
-        context: Context,
-        cancelled: bool = false,
-        ref_count: u32 = 1,
-        pending_err: ?Syscall.Error = null,
-        close_handler: ?*const fn (?*anyopaque) void = null,
-        close_ctx: ?*anyopaque = null,
-        close_jsvalue: JSC.Strong = .empty,
-        globalThis: *JSGlobalObject = undefined,
-        this_jsvalue: JSC.JSValue = .zero,
-        is_closed: bool = false,
+pub const BufferAction = union(enum) {
+    text: JSC.JSPromise.Strong,
+    arrayBuffer: JSC.JSPromise.Strong,
+    blob: JSC.JSPromise.Strong,
+    bytes: JSC.JSPromise.Strong,
+    json:
JSC.JSPromise.Strong, - const This = @This(); - const ReadableStreamSourceType = @This(); + pub const Tag = @typeInfo(BufferAction).@"union".tag_type.?; - pub const new = bun.TrivialNew(@This()); - pub const deinit = bun.TrivialDeinit(@This()); - - pub fn pull(this: *This, buf: []u8) StreamResult { - return onPull(&this.context, buf, JSValue.zero); - } - - pub fn ref(this: *This) void { - if (setRefUnrefFn) |setRefUnref| { - setRefUnref(&this.context, true); - } - } - - pub fn unref(this: *This) void { - if (setRefUnrefFn) |setRefUnref| { - setRefUnref(&this.context, false); - } - } - - pub fn setRef(this: *This, value: bool) void { - if (setRefUnrefFn) |setRefUnref| { - setRefUnref(&this.context, value); - } - } - - pub fn start( - this: *This, - ) StreamStart { - return onStart(&this.context); - } - - pub fn onPullFromJS(this: *This, buf: []u8, view: JSValue) StreamResult { - return onPull(&this.context, buf, view); - } - - pub fn onStartFromJS(this: *This) StreamStart { - return onStart(&this.context); - } - - pub fn cancel(this: *This) void { - if (this.cancelled) { - return; - } - - this.cancelled = true; - onCancel(&this.context); - } - - pub fn onClose(this: *This) void { - if (this.cancelled) { - return; - } - - if (this.close_handler) |close| { - this.close_handler = null; - if (close == &JSReadableStreamSource.onClose) { - JSReadableStreamSource.onClose(this); - } else { - close(this.close_ctx); - } - } - } - - pub fn incrementCount(this: *This) void { - this.ref_count += 1; - } - - pub fn decrementCount(this: *This) u32 { - if (comptime Environment.isDebug) { - if (this.ref_count == 0) { - @panic("Attempted to decrement ref count below zero"); - } - } - - this.ref_count -= 1; - if (this.ref_count == 0) { - this.close_jsvalue.deinit(); - deinit_fn(&this.context); - return 0; - } - - return this.ref_count; - } - - pub fn getError(this: *This) ?Syscall.Error { - if (this.pending_err) |err| { - this.pending_err = null; - return err; - } - - return null; - } - - pub fn drain(this: *This) bun.ByteList { - if (drainInternalBuffer) |drain_fn| { - return drain_fn(&this.context); - } - - return .{}; - } - - pub fn toReadableStream(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject) JSC.JSValue { - const out_value = brk: { - if (this.this_jsvalue != .zero) { - break :brk this.this_jsvalue; - } - - break :brk this.toJS(globalThis); - }; - out_value.ensureStillAlive(); - this.this_jsvalue = out_value; - return ReadableStream.fromNative(globalThis, out_value); - } - - pub fn setRawModeFromJS(this: *ReadableStreamSourceType, global: *JSC.JSGlobalObject, call_frame: *JSC.CallFrame) bun.JSError!JSValue { - if (@hasDecl(Context, "setRawMode")) { - const flag = call_frame.argument(0); - if (Environment.allow_assert) { - bun.assert(flag.isBoolean()); - } - return switch (this.context.setRawMode(flag == .true)) { - .result => .undefined, - .err => |e| e.toJSC(global), - }; - } - - @compileError("setRawMode is not implemented on " ++ @typeName(Context)); - } - - const supports_ref = setRefUnrefFn != null; - - pub const js = @field(JSC.Codegen, "JS" ++ name_ ++ "InternalReadableStreamSource"); - pub const toJS = js.toJS; - pub const fromJS = js.fromJS; - pub const fromJSDirect = js.fromJSDirect; - - pub const drainFromJS = JSReadableStreamSource.drain; - pub const startFromJS = JSReadableStreamSource.start; - pub const pullFromJS = JSReadableStreamSource.pull; - pub const cancelFromJS = JSReadableStreamSource.cancel; - pub const updateRefFromJS = JSReadableStreamSource.updateRef; - pub const 
setOnCloseFromJS = JSReadableStreamSource.setOnCloseFromJS; - pub const getOnCloseFromJS = JSReadableStreamSource.getOnCloseFromJS; - pub const setOnDrainFromJS = JSReadableStreamSource.setOnDrainFromJS; - pub const getOnDrainFromJS = JSReadableStreamSource.getOnDrainFromJS; - pub const finalize = JSReadableStreamSource.finalize; - pub const construct = JSReadableStreamSource.construct; - pub const getIsClosedFromJS = JSReadableStreamSource.isClosed; - pub const textFromJS = JSReadableStreamSource.text; - pub const jsonFromJS = JSReadableStreamSource.json; - pub const arrayBufferFromJS = JSReadableStreamSource.arrayBuffer; - pub const blobFromJS = JSReadableStreamSource.blob; - pub const bytesFromJS = JSReadableStreamSource.bytes; - - pub fn memoryCost(this: *const ReadableStreamSourceType) usize { - if (memoryCostFn) |function| { - return function(&this.context) + @sizeOf(@This()); - } - return @sizeOf(@This()); - } - - pub const JSReadableStreamSource = struct { - pub fn pull(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - const this_jsvalue = callFrame.this(); - const arguments = callFrame.arguments_old(2); - const view = arguments.ptr[0]; - view.ensureStillAlive(); - this.this_jsvalue = this_jsvalue; - var buffer = view.asArrayBuffer(globalThis) orelse return .undefined; - return processResult( - this_jsvalue, - globalThis, - arguments.ptr[1], - this.onPullFromJS(buffer.slice(), view), - ); - } - - pub fn start(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - this.globalThis = globalThis; - this.this_jsvalue = callFrame.this(); - switch (this.onStartFromJS()) { - .empty => return JSValue.jsNumber(0), - .ready => return JSValue.jsNumber(16384), - .chunk_size => |size| return JSValue.jsNumber(size), - .err => |err| { - return globalThis.throwValue(err.toJSC(globalThis)); - }, - else => |rc| { - return rc.toJS(globalThis); - }, - } - } - - pub fn isClosed(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - _ = globalObject; // autofix - return JSC.JSValue.jsBoolean(this.is_closed); - } - - fn processResult(this_jsvalue: JSC.JSValue, globalThis: *JSGlobalObject, flags: JSValue, result: StreamResult) bun.JSError!JSC.JSValue { - switch (result) { - .err => |err| { - if (err == .Error) { - return globalThis.throwValue(err.Error.toJSC(globalThis)); - } else { - const js_err = err.JSValue; - js_err.ensureStillAlive(); - js_err.unprotect(); - return globalThis.throwValue(js_err); - } - }, - .pending => { - const out = result.toJS(globalThis); - js.pendingPromiseSetCached(this_jsvalue, globalThis, out); - return out; - }, - .temporary_and_done, .owned_and_done, .into_array_and_done => { - JSC.C.JSObjectSetPropertyAtIndex(globalThis, flags.asObjectRef(), 0, JSValue.jsBoolean(true).asObjectRef(), null); - return result.toJS(globalThis); - }, - else => return result.toJS(globalThis), - } - } - - pub fn cancel(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - _ = globalObject; // autofix - JSC.markBinding(@src()); - this.this_jsvalue = callFrame.this(); - this.cancel(); - return .undefined; - } - - pub fn setOnCloseFromJS(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) bool { - JSC.markBinding(@src()); - this.close_handler = JSReadableStreamSource.onClose; - 
this.globalThis = globalObject; - - if (value.isUndefined()) { - this.close_jsvalue.deinit(); - return true; - } - - if (!value.isCallable()) { - globalObject.throwInvalidArgumentType("ReadableStreamSource", "onclose", "function") catch {}; - return false; - } - const cb = value.withAsyncContextIfNeeded(globalObject); - this.close_jsvalue.set(globalObject, cb); - return true; - } - - pub fn setOnDrainFromJS(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject, value: JSC.JSValue) bool { - JSC.markBinding(@src()); - this.globalThis = globalObject; - - if (value.isUndefined()) { - js.onDrainCallbackSetCached(this.this_jsvalue, globalObject, .undefined); - return true; - } - - if (!value.isCallable()) { - globalObject.throwInvalidArgumentType("ReadableStreamSource", "onDrain", "function") catch {}; - return false; - } - const cb = value.withAsyncContextIfNeeded(globalObject); - js.onDrainCallbackSetCached(this.this_jsvalue, globalObject, cb); - return true; - } - - pub fn getOnCloseFromJS(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - _ = globalObject; // autofix - - JSC.markBinding(@src()); - - return this.close_jsvalue.get() orelse .undefined; - } - - pub fn getOnDrainFromJS(this: *ReadableStreamSourceType, globalObject: *JSC.JSGlobalObject) JSC.JSValue { - _ = globalObject; // autofix - - JSC.markBinding(@src()); - - if (js.onDrainCallbackGetCached(this.this_jsvalue)) |val| { - return val; - } - - return .undefined; - } - - pub fn updateRef(this: *ReadableStreamSourceType, globalObject: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - _ = globalObject; // autofix - JSC.markBinding(@src()); - this.this_jsvalue = callFrame.this(); - const ref_or_unref = callFrame.argument(0).toBoolean(); - this.setRef(ref_or_unref); - - return .undefined; - } - - fn onClose(ptr: ?*anyopaque) void { - JSC.markBinding(@src()); - var this = bun.cast(*ReadableStreamSourceType, ptr.?); - if (this.close_jsvalue.trySwap()) |cb| { - this.globalThis.queueMicrotask(cb, &.{}); - } - - this.close_jsvalue.deinit(); - } - - pub fn finalize(this: *ReadableStreamSourceType) void { - this.this_jsvalue = .zero; - - _ = this.decrementCount(); - } - - pub fn drain(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - this.this_jsvalue = callFrame.this(); - var list = this.drain(); - if (list.len > 0) { - return JSC.ArrayBuffer.fromBytes(list.slice(), .Uint8Array).toJS(globalThis, null); - } - return JSValue.jsUndefined(); - } - - pub fn text(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - this.this_jsvalue = callFrame.this(); - - if (toBufferedValue) |to_buffered_value| { - return to_buffered_value(&this.context, globalThis, .text); - } - - globalThis.throwTODO("This is not implemented yet"); - return .zero; - } - - pub fn arrayBuffer(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - this.this_jsvalue = callFrame.this(); - - if (toBufferedValue) |to_buffered_value| { - return to_buffered_value(&this.context, globalThis, .arrayBuffer); - } - - globalThis.throwTODO("This is not implemented yet"); - return .zero; - } - - pub fn blob(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - 
this.this_jsvalue = callFrame.this(); - - if (toBufferedValue) |to_buffered_value| { - return to_buffered_value(&this.context, globalThis, .blob); - } - - globalThis.throwTODO("This is not implemented yet"); - return .zero; - } - - pub fn bytes(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - this.this_jsvalue = callFrame.this(); - - if (toBufferedValue) |to_buffered_value| { - return to_buffered_value(&this.context, globalThis, .bytes); - } - - globalThis.throwTODO("This is not implemented yet"); - return .zero; - } - - pub fn json(this: *ReadableStreamSourceType, globalThis: *JSGlobalObject, callFrame: *JSC.CallFrame) bun.JSError!JSC.JSValue { - JSC.markBinding(@src()); - this.this_jsvalue = callFrame.this(); - - if (toBufferedValue) |to_buffered_value| { - return to_buffered_value(&this.context, globalThis, .json); - } - - globalThis.throwTODO("This is not implemented yet"); - return .zero; - } - }; - }; -} - -pub const AutoFlusher = @import("./AutoFlusher.zig"); - -pub const FileSink = struct { - ref_count: RefCount, - writer: IOWriter = .{}, - event_loop_handle: JSC.EventLoopHandle, - written: usize = 0, - pending: StreamResult.Writable.Pending = .{ - .result = .{ .done = {} }, - }, - signal: Signal = Signal{}, - done: bool = false, - started: bool = false, - must_be_kept_alive_until_eof: bool = false, - - // TODO: these fields are duplicated on writer() - // we should not duplicate these fields... - pollable: bool = false, - nonblocking: bool = false, - force_sync: bool = false, - - is_socket: bool = false, - fd: bun.FileDescriptor = bun.invalid_fd, - - auto_flusher: AutoFlusher = .{}, - run_pending_later: FlushPendingFileSinkTask = .{}, - - const log = Output.scoped(.FileSink, false); - - pub const RefCount = bun.ptr.RefCount(FileSink, "ref_count", deinit, .{}); - pub const ref = RefCount.ref; - pub const deref = RefCount.deref; - - pub const IOWriter = bun.io.StreamingWriter(@This(), opaque { - pub const onClose = FileSink.onClose; - pub const onWritable = FileSink.onReady; - pub const onError = FileSink.onError; - pub const onWrite = FileSink.onWrite; - }); - pub const Poll = IOWriter; - - pub fn memoryCost(this: *const FileSink) usize { - // Since this is a JSSink, the NewJSSink function does @sizeOf(JSSink) which includes @sizeOf(FileSink). 
- return this.writer.memoryCost(); + pub fn fulfill(this: *BufferAction, global: *JSC.JSGlobalObject, blob: *AnyBlob) void { + blob.wrap(.{ .normal = this.swap() }, global, this.*); } - fn Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio(_: *JSC.JSGlobalObject, jsvalue: JSC.JSValue) callconv(.C) void { - var this: *FileSink = @alignCast(@ptrCast(JSSink.fromJS(jsvalue) orelse return)); - this.force_sync = true; - if (comptime !Environment.isWindows) { - this.writer.force_sync = true; - if (this.fd != bun.invalid_fd) { - _ = bun.sys.updateNonblocking(this.fd, false); - } - } + pub fn reject(this: *BufferAction, global: *JSC.JSGlobalObject, err: Result.StreamError) void { + this.swap().reject(global, err.toJSWeak(global)[0]); } - comptime { - @export(&Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio, .{ .name = "Bun__ForceFileSinkToBeSynchronousForProcessObjectStdio" }); + pub fn resolve(this: *BufferAction, global: *JSC.JSGlobalObject, result: JSC.JSValue) void { + this.swap().resolve(global, result); } - pub fn onAttachedProcessExit(this: *FileSink) void { - log("onAttachedProcessExit()", .{}); - this.done = true; - this.writer.close(); - - this.pending.result = .{ .err = Syscall.Error.fromCode(.PIPE, .write) }; - - this.runPending(); - - if (this.must_be_kept_alive_until_eof) { - this.must_be_kept_alive_until_eof = false; - this.deref(); - } - } - - fn runPending(this: *FileSink) void { - this.ref(); - defer this.deref(); - - this.run_pending_later.has = false; - const l = this.eventLoop(); - l.enter(); - defer l.exit(); - this.pending.run(); - } - - pub fn onWrite(this: *FileSink, amount: usize, status: bun.io.WriteStatus) void { - log("onWrite({d}, {any})", .{ amount, status }); - - this.written += amount; - - // TODO: on windows done means ended (no pending data on the buffer) on unix we can still have pending data on the buffer - // we should unify the behaviors to simplify this - const has_pending_data = this.writer.hasPendingData(); - // Only keep the event loop ref'd while there's a pending write in progress. - // If there's no pending write, no need to keep the event loop ref'd. - this.writer.updateRef(this.eventLoop(), has_pending_data); - - if (has_pending_data) { - if (this.event_loop_handle.bunVM()) |vm| { - if (!vm.is_inside_deferred_task_queue) { - AutoFlusher.registerDeferredMicrotaskWithType(@This(), this, vm); - } - } - } - - // if we are not done yet and has pending data we just wait so we do not runPending twice - if (status == .pending and has_pending_data) { - if (this.pending.state == .pending) { - this.pending.consumed = @truncate(amount); - } - return; - } - - if (this.pending.state == .pending) { - this.pending.consumed = @truncate(amount); - - // when "done" is true, we will never receive more data. 
- if (this.done or status == .end_of_file) { - this.pending.result = .{ .owned_and_done = this.pending.consumed }; - } else { - this.pending.result = .{ .owned = this.pending.consumed }; - } - - this.runPending(); - - // this.done == true means ended was called - const ended_and_done = this.done and status == .end_of_file; - - if (this.done and status == .drained) { - // if we call end/endFromJS and we have some pending returned from .flush() we should call writer.end() - this.writer.end(); - } else if (ended_and_done and !has_pending_data) { - this.writer.close(); - } - } - - if (status == .end_of_file) { - if (this.must_be_kept_alive_until_eof) { - this.must_be_kept_alive_until_eof = false; - this.deref(); - } - this.signal.close(null); - } - } - - pub fn onError(this: *FileSink, err: bun.sys.Error) void { - log("onError({any})", .{err}); - if (this.pending.state == .pending) { - this.pending.result = .{ .err = err }; - if (this.eventLoop().bunVM()) |vm| { - if (vm.is_inside_deferred_task_queue) { - this.runPendingLater(); - return; - } - } - - this.runPending(); - } - } - - pub fn onReady(this: *FileSink) void { - log("onReady()", .{}); - - this.signal.ready(null, null); - } - - pub fn onClose(this: *FileSink) void { - log("onClose()", .{}); - this.signal.close(null); - } - - pub fn createWithPipe( - event_loop_: anytype, - pipe: *uv.Pipe, - ) *FileSink { - if (Environment.isPosix) { - @compileError("FileSink.createWithPipe is only available on Windows"); - } - - const evtloop = switch (@TypeOf(event_loop_)) { - JSC.EventLoopHandle => event_loop_, - else => JSC.EventLoopHandle.init(event_loop_), - }; - - var this = bun.new(FileSink, .{ - .ref_count = .init(), - .event_loop_handle = JSC.EventLoopHandle.init(evtloop), - .fd = pipe.fd(), - }); - this.writer.setPipe(pipe); - this.writer.setParent(this); - return this; - } - - pub fn create( - event_loop_: anytype, - fd: bun.FileDescriptor, - ) *FileSink { - const evtloop = switch (@TypeOf(event_loop_)) { - JSC.EventLoopHandle => event_loop_, - else => JSC.EventLoopHandle.init(event_loop_), - }; - var this = bun.new(FileSink, .{ - .ref_count = .init(), - .event_loop_handle = JSC.EventLoopHandle.init(evtloop), - .fd = fd, - }); - this.writer.setParent(this); - return this; - } - - pub fn setup(this: *FileSink, options: *const StreamStart.FileSinkOptions) JSC.Maybe(void) { - // TODO: this should be concurrent. - var isatty = false; - var is_nonblocking = false; - const fd = switch (switch (options.input_path) { - .path => |path| brk: { - is_nonblocking = true; - break :brk bun.sys.openA(path.slice(), options.flags(), options.mode); - }, - .fd => |fd_| brk: { - const duped = bun.sys.dupWithFlags(fd_, 0); - - break :brk duped; - }, - }) { - .err => |err| return .{ .err = err }, - .result => |fd| fd, - }; - - if (comptime Environment.isPosix) { - switch (bun.sys.fstat(fd)) { - .err => |err| { - fd.close(); - return .{ .err = err }; - }, - .result => |stat| { - this.pollable = bun.sys.isPollable(stat.mode); - if (!this.pollable) { - isatty = std.posix.isatty(fd.native()); - } - - if (isatty) { - this.pollable = true; - } - - this.fd = fd; - this.is_socket = std.posix.S.ISSOCK(stat.mode); - - if (this.force_sync or isatty) { - // Prevents interleaved or dropped stdout/stderr output for terminals. - // As noted in the following reference, local TTYs tend to be quite fast and - // this behavior has become expected due historical functionality on OS X, - // even though it was originally intended to change in v1.0.2 (Libuv 1.2.1). 
- // Ref: https://github.com/nodejs/node/pull/1771#issuecomment-119351671 - _ = bun.sys.updateNonblocking(fd, false); - is_nonblocking = false; - this.force_sync = true; - this.writer.force_sync = true; - } else if (!is_nonblocking) { - const flags = switch (bun.sys.getFcntlFlags(fd)) { - .result => |flags| flags, - .err => |err| { - fd.close(); - return .{ .err = err }; - }, - }; - is_nonblocking = (flags & @as(@TypeOf(flags), bun.O.NONBLOCK)) != 0; - - if (!is_nonblocking) { - if (bun.sys.setNonblocking(fd) == .result) { - is_nonblocking = true; - } - } - } - - this.nonblocking = is_nonblocking and this.pollable; - }, - } - } else if (comptime Environment.isWindows) { - this.pollable = (bun.windows.GetFileType(fd.cast()) & bun.windows.FILE_TYPE_PIPE) != 0 and !this.force_sync; - this.fd = fd; - } else { - @compileError("TODO: implement for this platform"); - } - - if (comptime Environment.isWindows) { - if (this.force_sync) { - switch (this.writer.startSync( - fd, - this.pollable, - )) { - .err => |err| { - fd.close(); - return .{ .err = err }; - }, - .result => { - this.writer.updateRef(this.eventLoop(), false); - }, - } - return .{ .result = {} }; - } - } - - switch (this.writer.start( - fd, - this.pollable, - )) { - .err => |err| { - fd.close(); - return .{ .err = err }; - }, - .result => { - // Only keep the event loop ref'd while there's a pending write in progress. - // If there's no pending write, no need to keep the event loop ref'd. - this.writer.updateRef(this.eventLoop(), false); - if (comptime Environment.isPosix) { - if (this.nonblocking) { - this.writer.getPoll().?.flags.insert(.nonblocking); - } - - if (this.is_socket) { - this.writer.getPoll().?.flags.insert(.socket); - } else if (this.pollable) { - this.writer.getPoll().?.flags.insert(.fifo); - } - } - }, - } - - return .{ .result = {} }; - } - - pub fn loop(this: *FileSink) *Async.Loop { - return this.event_loop_handle.loop(); - } - - pub fn eventLoop(this: *FileSink) JSC.EventLoopHandle { - return this.event_loop_handle; - } - - pub fn connect(this: *FileSink, signal: Signal) void { - this.signal = signal; - } - - pub fn start(this: *FileSink, stream_start: StreamStart) JSC.Maybe(void) { - switch (stream_start) { - .FileSink => |*file| { - switch (this.setup(file)) { - .err => |err| { - return .{ .err = err }; - }, - .result => {}, - } - }, - else => {}, - } - - this.done = false; - this.started = true; - this.signal.start(); - return .{ .result = {} }; - } - - pub fn runPendingLater(this: *FileSink) void { - if (this.run_pending_later.has) { - return; - } - this.run_pending_later.has = true; - const event_loop = this.eventLoop(); - if (event_loop == .js) { - this.ref(); - event_loop.js.enqueueTask(JSC.Task.init(&this.run_pending_later)); - } - } - - pub fn onAutoFlush(this: *FileSink) bool { - if (this.done or !this.writer.hasPendingData()) { - this.updateRef(false); - this.auto_flusher.registered = false; - return false; - } - - this.ref(); - defer this.deref(); - - const amount_buffered = this.writer.outgoing.size(); - - switch (this.writer.flush()) { - .err, .done => { - this.updateRef(false); - this.runPendingLater(); - }, - .wrote => |amount_drained| { - if (amount_drained == amount_buffered) { - this.updateRef(false); - this.runPendingLater(); - } - }, - else => { - return true; - }, - } - - const is_registered = !this.writer.hasPendingData(); - this.auto_flusher.registered = is_registered; - return is_registered; - } - - pub fn flush(_: *FileSink) JSC.Maybe(void) { - return .{ .result = {} }; - } - - pub fn 
flushFromJS(this: *FileSink, globalThis: *JSGlobalObject, wait: bool) JSC.Maybe(JSValue) { - _ = wait; - - if (this.pending.state == .pending) { - return .{ .result = this.pending.future.promise.strong.value() }; - } - - if (this.done) { - return .{ .result = .undefined }; - } - - const rc = this.writer.flush(); - switch (rc) { - .done => |written| { - this.written += @truncate(written); - }, - .pending => |written| { - this.written += @truncate(written); - }, - .wrote => |written| { - this.written += @truncate(written); - }, - .err => |err| { - return .{ .err = err }; - }, - } - return switch (this.toResult(rc)) { - .err => unreachable, - else => |result| .{ .result = result.toJS(globalThis) }, + pub fn value(this: *BufferAction) JSC.JSValue { + return switch (this.*) { + inline else => |promise| promise.value(), }; } - pub fn finalize(this: *FileSink) void { - this.pending.deinit(); - this.deref(); - } - - pub fn init(fd: bun.FileDescriptor, event_loop_handle: anytype) *FileSink { - var this = bun.new(FileSink, .{ - .ref_count = .init(), - .writer = .{}, - .fd = fd, - .event_loop_handle = JSC.EventLoopHandle.init(event_loop_handle), - }); - this.writer.setParent(this); - - return this; - } - - pub fn construct(this: *FileSink, _: std.mem.Allocator) void { - this.* = FileSink{ - .ref_count = .init(), - .event_loop_handle = JSC.EventLoopHandle.init(JSC.VirtualMachine.get().eventLoop()), + pub fn get(this: *BufferAction) *JSC.JSPromise { + return switch (this.*) { + inline else => |promise| promise.get(), }; } - pub fn write(this: *@This(), data: StreamResult) StreamResult.Writable { - if (this.done) { - return .{ .done = {} }; - } - - return this.toResult(this.writer.write(data.slice())); - } - pub const writeBytes = write; - pub fn writeLatin1(this: *@This(), data: StreamResult) StreamResult.Writable { - if (this.done) { - return .{ .done = {} }; - } - - return this.toResult(this.writer.writeLatin1(data.slice())); - } - pub fn writeUTF16(this: *@This(), data: StreamResult) StreamResult.Writable { - if (this.done) { - return .{ .done = {} }; - } - - return this.toResult(this.writer.writeUTF16(data.slice16())); - } - - pub fn end(this: *FileSink, err: ?Syscall.Error) JSC.Maybe(void) { - if (this.done) { - return .{ .result = {} }; - } - - _ = err; // autofix - - switch (this.writer.flush()) { - .done => |written| { - this.written += @truncate(written); - this.writer.end(); - return .{ .result = {} }; - }, - .err => |e| { - this.writer.close(); - return .{ .err = e }; - }, - .pending => |written| { - this.written += @truncate(written); - if (!this.must_be_kept_alive_until_eof) { - this.must_be_kept_alive_until_eof = true; - this.ref(); - } - this.done = true; - return .{ .result = {} }; - }, - .wrote => |written| { - this.written += @truncate(written); - this.writer.end(); - return .{ .result = {} }; - }, - } - } - - fn deinit(this: *FileSink) void { - this.pending.deinit(); - this.writer.deinit(); - if (this.event_loop_handle.globalObject()) |global| { - AutoFlusher.unregisterDeferredMicrotaskWithType(@This(), this, global.bunVM()); - } - bun.destroy(this); - } - - pub fn toJS(this: *FileSink, globalThis: *JSGlobalObject) JSValue { - return JSSink.createObject(globalThis, this, 0); - } - - pub fn toJSWithDestructor(this: *FileSink, globalThis: *JSGlobalObject, destructor: ?SinkDestructor.Ptr) JSValue { - return JSSink.createObject(globalThis, this, if (destructor) |dest| @intFromPtr(dest.ptr()) else 0); - } - - pub fn endFromJS(this: *FileSink, globalThis: *JSGlobalObject) 
JSC.Maybe(JSValue) { - if (this.done) { - if (this.pending.state == .pending) { - return .{ .result = this.pending.future.promise.strong.value() }; - } - - return .{ .result = JSValue.jsNumber(this.written) }; - } - - switch (this.writer.flush()) { - .done => |written| { - this.updateRef(false); - this.writer.end(); - return .{ .result = JSValue.jsNumber(written) }; - }, - .err => |err| { - this.writer.close(); - return .{ .err = err }; - }, - .pending => |pending_written| { - this.written += @truncate(pending_written); - if (!this.must_be_kept_alive_until_eof) { - this.must_be_kept_alive_until_eof = true; - this.ref(); - } - this.done = true; - this.pending.result = .{ .owned = @truncate(pending_written) }; - return .{ .result = this.pending.promise(globalThis).asValue(globalThis) }; - }, - .wrote => |written| { - this.writer.end(); - return .{ .result = JSValue.jsNumber(written) }; - }, - } - } - - pub fn sink(this: *FileSink) Sink { - return Sink.init(this); - } - - pub fn updateRef(this: *FileSink, value: bool) void { - if (value) { - this.writer.enableKeepingProcessAlive(this.event_loop_handle); - } else { - this.writer.disableKeepingProcessAlive(this.event_loop_handle); - } - } - - pub const JSSink = NewJSSink(@This(), "FileSink"); - - fn getFd(this: *const @This()) i32 { - if (Environment.isWindows) { - return switch (this.fd.decodeWindows()) { - .windows => -1, // TODO: - .uv => |num| num, - }; - } - return this.fd.cast(); - } - - fn toResult(this: *FileSink, write_result: bun.io.WriteResult) StreamResult.Writable { - switch (write_result) { - .done => |amt| { - if (amt > 0) - return .{ .owned_and_done = @truncate(amt) }; - - return .{ .done = {} }; - }, - .wrote => |amt| { - if (amt > 0) - return .{ .owned = @truncate(amt) }; - - return .{ .temporary = @truncate(amt) }; - }, - .err => |err| { - return .{ .err = err }; - }, - .pending => |pending_written| { - if (!this.must_be_kept_alive_until_eof) { - this.must_be_kept_alive_until_eof = true; - this.ref(); - } - this.pending.consumed += @truncate(pending_written); - this.pending.result = .{ .owned = @truncate(pending_written) }; - return .{ .pending = &this.pending }; - }, - } - } -}; - -pub const FlushPendingFileSinkTask = struct { - has: bool = false, - pub fn runFromJSThread(flush_pending: *FlushPendingFileSinkTask) void { - const had = flush_pending.has; - flush_pending.has = false; - const this: *FileSink = @alignCast(@fieldParentPtr("run_pending_later", flush_pending)); - defer this.deref(); - if (had) - this.runPending(); - } -}; - -pub const FileReader = struct { - const log = Output.scoped(.FileReader, false); - reader: IOReader = IOReader.init(FileReader), - done: bool = false, - pending: StreamResult.Pending = .{}, - pending_value: JSC.Strong = .empty, - pending_view: []u8 = &.{}, - fd: bun.FileDescriptor = bun.invalid_fd, - start_offset: ?usize = null, - max_size: ?usize = null, - total_readed: usize = 0, - started: bool = false, - waiting_for_onReaderDone: bool = false, - event_loop: JSC.EventLoopHandle, - lazy: Lazy = .{ .none = {} }, - buffered: std.ArrayListUnmanaged(u8) = .{}, - read_inside_on_pull: ReadDuringJSOnPullResult = .{ .none = {} }, - highwater_mark: usize = 16384, - - pub const IOReader = bun.io.BufferedReader; - pub const Poll = IOReader; - pub const tag = ReadableStream.Tag.File; - - const ReadDuringJSOnPullResult = union(enum) { - none: void, - js: []u8, - amount_read: usize, - temporary: []const u8, - use_buffered: usize, - }; - - pub const Lazy = union(enum) { - none: void, - blob: *Blob.Store, - - 
const OpenedFileBlob = struct { - fd: bun.FileDescriptor, - pollable: bool = false, - nonblocking: bool = true, - file_type: bun.io.FileType = .file, - }; - - pub fn openFileBlob(file: *Blob.FileStore) JSC.Maybe(OpenedFileBlob) { - var this = OpenedFileBlob{ .fd = bun.invalid_fd }; - var file_buf: bun.PathBuffer = undefined; - var is_nonblocking = false; - - const fd: bun.FD = if (file.pathlike == .fd) - if (file.pathlike.fd.stdioTag() != null) brk: { - if (comptime Environment.isPosix) { - const rc = bun.C.open_as_nonblocking_tty(file.pathlike.fd.native(), bun.O.RDONLY); - if (rc > -1) { - is_nonblocking = true; - file.is_atty = true; - break :brk .fromNative(rc); - } - } - break :brk file.pathlike.fd; - } else brk: { - const duped = Syscall.dupWithFlags(file.pathlike.fd, 0); - - if (duped != .result) { - return .{ .err = duped.err.withFd(file.pathlike.fd) }; - } - - const fd: bun.FD = duped.result; - if (comptime Environment.isPosix) { - if (fd.stdioTag() == null) { - is_nonblocking = switch (fd.getFcntlFlags()) { - .result => |flags| (flags & bun.O.NONBLOCK) != 0, - .err => false, - }; - } - } - - break :brk switch (fd.makeLibUVOwnedForSyscall(.dup, .close_on_fail)) { - .result => |owned_fd| owned_fd, - .err => |err| { - return .{ .err = err }; - }, - }; - } - else switch (Syscall.open(file.pathlike.path.sliceZ(&file_buf), bun.O.RDONLY | bun.O.NONBLOCK | bun.O.CLOEXEC, 0)) { - .result => |fd| fd, - .err => |err| { - return .{ .err = err.withPath(file.pathlike.path.slice()) }; - }, - }; - - if (comptime Environment.isPosix) { - if ((file.is_atty orelse false) or - (fd.stdioTag() != null and std.posix.isatty(fd.cast())) or - (file.pathlike == .fd and - file.pathlike.fd.stdioTag() != null and - std.posix.isatty(file.pathlike.fd.cast()))) - { - // var termios = std.mem.zeroes(std.posix.termios); - // _ = std.c.tcgetattr(fd.cast(), &termios); - // bun.C.cfmakeraw(&termios); - // _ = std.c.tcsetattr(fd.cast(), std.posix.TCSA.NOW, &termios); - file.is_atty = true; - } - - const stat: bun.Stat = switch (Syscall.fstat(fd)) { - .result => |result| result, - .err => |err| { - fd.close(); - return .{ .err = err }; - }, - }; - - if (bun.S.ISDIR(stat.mode)) { - bun.Async.Closer.close(fd, {}); - return .{ .err = Syscall.Error.fromCode(.ISDIR, .fstat) }; - } - - this.pollable = bun.sys.isPollable(stat.mode) or is_nonblocking or (file.is_atty orelse false); - this.file_type = if (bun.S.ISFIFO(stat.mode)) - .pipe - else if (bun.S.ISSOCK(stat.mode)) - .socket - else - .file; - - // pretend it's a non-blocking pipe if it's a TTY - if (is_nonblocking and this.file_type != .socket) { - this.file_type = .nonblocking_pipe; - } - - this.nonblocking = is_nonblocking or (this.pollable and !(file.is_atty orelse false)); - - if (this.nonblocking and this.file_type == .pipe) { - this.file_type = .nonblocking_pipe; - } - } - - this.fd = fd; - - return .{ .result = this }; - } - }; - - pub fn eventLoop(this: *const FileReader) JSC.EventLoopHandle { - return this.event_loop; - } - - pub fn loop(this: *const FileReader) *Async.Loop { - return this.eventLoop().loop(); - } - - pub fn setup( - this: *FileReader, - fd: bun.FileDescriptor, - ) void { - this.* = FileReader{ - .reader = .{}, - .done = false, - .fd = fd, - }; - - this.event_loop = this.parent().globalThis.bunVM().eventLoop(); - } - - pub fn onStart(this: *FileReader) StreamStart { - this.reader.setParent(this); - const was_lazy = this.lazy != .none; - var pollable = false; - var file_type: bun.io.FileType = .file; - if (this.lazy == .blob) { - switch 
(this.lazy.blob.data) { - .s3, .bytes => @panic("Invalid state in FileReader: expected file "), - .file => |*file| { - defer { - this.lazy.blob.deref(); - this.lazy = .none; - } - switch (Lazy.openFileBlob(file)) { - .err => |err| { - this.fd = bun.invalid_fd; - return .{ .err = err }; - }, - .result => |opened| { - bun.assert(opened.fd.isValid()); - this.fd = opened.fd; - pollable = opened.pollable; - file_type = opened.file_type; - this.reader.flags.nonblocking = opened.nonblocking; - this.reader.flags.pollable = pollable; - }, - } - }, - } - } - - { - const reader_fd = this.reader.getFd(); - if (reader_fd != bun.invalid_fd and this.fd == bun.invalid_fd) { - this.fd = reader_fd; - } - } - - this.event_loop = JSC.EventLoopHandle.init(this.parent().globalThis.bunVM().eventLoop()); - - if (was_lazy) { - _ = this.parent().incrementCount(); - this.waiting_for_onReaderDone = true; - if (this.start_offset) |offset| { - switch (this.reader.startFileOffset(this.fd, pollable, offset)) { - .result => {}, - .err => |e| { - return .{ .err = e }; - }, - } - } else { - switch (this.reader.start(this.fd, pollable)) { - .result => {}, - .err => |e| { - return .{ .err = e }; - }, - } - } - } else if (comptime Environment.isPosix) { - if (this.reader.flags.pollable and !this.reader.isDone()) { - this.waiting_for_onReaderDone = true; - _ = this.parent().incrementCount(); - } - } - - if (comptime Environment.isPosix) { - if (file_type == .socket) { - this.reader.flags.socket = true; - } - - if (this.reader.handle.getPoll()) |poll| { - if (file_type == .socket or this.reader.flags.socket) { - poll.flags.insert(.socket); - } else { - // if it's a TTY, we report it as a fifo - // we want the behavior to be as though it were a blocking pipe. - poll.flags.insert(.fifo); - } - - if (this.reader.flags.nonblocking) { - poll.flags.insert(.nonblocking); - } - } - } - - this.started = true; - - if (this.reader.isDone()) { - this.consumeReaderBuffer(); - if (this.buffered.items.len > 0) { - const buffered = this.buffered; - this.buffered = .{}; - return .{ .owned_and_done = bun.ByteList.init(buffered.items) }; - } - } else if (comptime Environment.isPosix) { - if (!was_lazy and this.reader.flags.pollable) { - this.reader.read(); - } - } - - return .{ .ready = {} }; - } - - pub fn parent(this: *@This()) *Source { - return @fieldParentPtr("context", this); - } - - pub fn onCancel(this: *FileReader) void { - if (this.done) return; - this.done = true; - this.reader.updateRef(false); - if (!this.reader.isDone()) - this.reader.close(); - } - - pub fn deinit(this: *FileReader) void { - this.buffered.deinit(bun.default_allocator); - this.reader.updateRef(false); - this.reader.deinit(); - this.pending_value.deinit(); - - if (this.lazy != .none) { - this.lazy.blob.deref(); - this.lazy = .none; - } - - this.parent().deinit(); - } - - pub fn onReadChunk(this: *@This(), init_buf: []const u8, state: bun.io.ReadState) bool { - var buf = init_buf; - log("onReadChunk() = {d} ({s}) - read_inside_on_pull: {s}", .{ buf.len, @tagName(state), @tagName(this.read_inside_on_pull) }); - - if (this.done) { - this.reader.close(); - return false; - } - var close = false; - defer if (close) this.reader.close(); - var hasMore = state != .eof; - - if (buf.len > 0) { - if (this.max_size) |max_size| { - if (this.total_readed >= max_size) return false; - const len = @min(max_size - this.total_readed, buf.len); - if (buf.len > len) { - buf = buf[0..len]; - } - this.total_readed += len; - - if (buf.len == 0) { - close = true; - hasMore = false; - } - } - } 
- - if (this.read_inside_on_pull != .none) { - switch (this.read_inside_on_pull) { - .js => |in_progress| { - if (in_progress.len >= buf.len and !hasMore) { - @memcpy(in_progress[0..buf.len], buf); - this.read_inside_on_pull = .{ .js = in_progress[buf.len..] }; - } else if (in_progress.len > 0 and !hasMore) { - this.read_inside_on_pull = .{ .temporary = buf }; - } else if (hasMore and !bun.isSliceInBuffer(buf, this.buffered.allocatedSlice())) { - this.buffered.appendSlice(bun.default_allocator, buf) catch bun.outOfMemory(); - this.read_inside_on_pull = .{ .use_buffered = buf.len }; - } - }, - .use_buffered => |original| { - this.buffered.appendSlice(bun.default_allocator, buf) catch bun.outOfMemory(); - this.read_inside_on_pull = .{ .use_buffered = buf.len + original }; - }, - .none => unreachable, - else => @panic("Invalid state"), - } - } else if (this.pending.state == .pending) { - if (buf.len == 0) { - { - if (this.buffered.items.len == 0) { - if (this.buffered.capacity > 0) { - this.buffered.clearAndFree(bun.default_allocator); - } - - if (this.reader.buffer().items.len != 0) { - this.buffered = this.reader.buffer().moveToUnmanaged(); - } - } - - var buffer = &this.buffered; - defer buffer.clearAndFree(bun.default_allocator); - if (buffer.items.len > 0) { - if (this.pending_view.len >= buffer.items.len) { - @memcpy(this.pending_view[0..buffer.items.len], buffer.items); - this.pending.result = .{ .into_array_and_done = .{ .value = this.pending_value.get() orelse .zero, .len = @truncate(buffer.items.len) } }; - } else { - this.pending.result = .{ .owned_and_done = bun.ByteList.fromList(buffer.*) }; - buffer.* = .{}; - } - } else { - this.pending.result = .{ .done = {} }; - } - } - this.pending_value.clearWithoutDeallocation(); - this.pending_view = &.{}; - this.pending.run(); - return false; - } - - const was_done = this.reader.isDone(); - - if (this.pending_view.len >= buf.len) { - @memcpy(this.pending_view[0..buf.len], buf); - this.reader.buffer().clearRetainingCapacity(); - this.buffered.clearRetainingCapacity(); - - if (was_done) { - this.pending.result = .{ - .into_array_and_done = .{ - .value = this.pending_value.get() orelse .zero, - .len = @truncate(buf.len), - }, - }; - } else { - this.pending.result = .{ - .into_array = .{ - .value = this.pending_value.get() orelse .zero, - .len = @truncate(buf.len), - }, - }; - } - - this.pending_value.clearWithoutDeallocation(); - this.pending_view = &.{}; - this.pending.run(); - return !was_done; - } - - if (!bun.isSliceInBuffer(buf, this.buffered.allocatedSlice())) { - if (this.reader.isDone()) { - if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { - this.reader.buffer().* = std.ArrayList(u8).init(bun.default_allocator); - } - this.pending.result = .{ - .temporary_and_done = bun.ByteList.init(buf), - }; - } else { - this.pending.result = .{ - .temporary = bun.ByteList.init(buf), - }; - - if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { - this.reader.buffer().clearRetainingCapacity(); - } - } - - this.pending_value.clearWithoutDeallocation(); - this.pending_view = &.{}; - this.pending.run(); - return !was_done; - } - - if (this.reader.isDone()) { - this.pending.result = .{ - .owned_and_done = bun.ByteList.init(buf), - }; - } else { - this.pending.result = .{ - .owned = bun.ByteList.init(buf), - }; - } - this.buffered = .{}; - this.pending_value.clearWithoutDeallocation(); - this.pending_view = &.{}; - this.pending.run(); - return !was_done; - } else if (!bun.isSliceInBuffer(buf, 
this.buffered.allocatedSlice())) { - this.buffered.appendSlice(bun.default_allocator, buf) catch bun.outOfMemory(); - if (bun.isSliceInBuffer(buf, this.reader.buffer().allocatedSlice())) { - this.reader.buffer().clearRetainingCapacity(); - } - } - - // For pipes, we have to keep pulling or the other process will block. - return this.read_inside_on_pull != .temporary and !(this.buffered.items.len + this.reader.buffer().items.len >= this.highwater_mark and !this.reader.flags.pollable); - } - - fn isPulling(this: *const FileReader) bool { - return this.read_inside_on_pull != .none; - } - - pub fn onPull(this: *FileReader, buffer: []u8, array: JSC.JSValue) StreamResult { - array.ensureStillAlive(); - defer array.ensureStillAlive(); - const drained = this.drain(); - - if (drained.len > 0) { - log("onPull({d}) = {d}", .{ buffer.len, drained.len }); - - this.pending_value.clearWithoutDeallocation(); - this.pending_view = &.{}; - - if (buffer.len >= @as(usize, drained.len)) { - @memcpy(buffer[0..drained.len], drained.slice()); - this.buffered.clearAndFree(bun.default_allocator); - - if (this.reader.isDone()) { - return .{ .into_array_and_done = .{ .value = array, .len = drained.len } }; - } else { - return .{ .into_array = .{ .value = array, .len = drained.len } }; - } - } - - if (this.reader.isDone()) { - return .{ .owned_and_done = drained }; - } else { - return .{ .owned = drained }; - } - } - - if (this.reader.isDone()) { - return .{ .done = {} }; - } - - if (!this.reader.hasPendingRead()) { - this.read_inside_on_pull = .{ .js = buffer }; - this.reader.read(); - - defer this.read_inside_on_pull = .{ .none = {} }; - switch (this.read_inside_on_pull) { - .js => |remaining_buf| { - const amount_read = buffer.len - remaining_buf.len; - - log("onPull({d}) = {d}", .{ buffer.len, amount_read }); - - if (amount_read > 0) { - if (this.reader.isDone()) { - return .{ .into_array_and_done = .{ .value = array, .len = @truncate(amount_read) } }; - } - - return .{ .into_array = .{ .value = array, .len = @truncate(amount_read) } }; - } - - if (this.reader.isDone()) { - return .{ .done = {} }; - } - }, - .temporary => |buf| { - log("onPull({d}) = {d}", .{ buffer.len, buf.len }); - if (this.reader.isDone()) { - return .{ .temporary_and_done = bun.ByteList.init(buf) }; - } - - return .{ .temporary = bun.ByteList.init(buf) }; - }, - .use_buffered => { - const buffered = this.buffered; - this.buffered = .{}; - log("onPull({d}) = {d}", .{ buffer.len, buffered.items.len }); - if (this.reader.isDone()) { - return .{ .owned_and_done = bun.ByteList.init(buffered.items) }; - } - - return .{ .owned = bun.ByteList.init(buffered.items) }; - }, - else => {}, - } - - if (this.reader.isDone()) { - log("onPull({d}) = done", .{buffer.len}); - - return .{ .done = {} }; - } - } - - this.pending_value.set(this.parent().globalThis, array); - this.pending_view = buffer; - - log("onPull({d}) = pending", .{buffer.len}); - - return .{ .pending = &this.pending }; - } - - pub fn drain(this: *FileReader) bun.ByteList { - if (this.buffered.items.len > 0) { - const out = bun.ByteList.init(this.buffered.items); - this.buffered = .{}; - if (comptime Environment.allow_assert) { - bun.assert(this.reader.buffer().items.ptr != out.ptr); - } - return out; - } - - if (this.reader.hasPendingRead()) { - return .{}; - } - - const out = this.reader.buffer().*; - this.reader.buffer().* = std.ArrayList(u8).init(bun.default_allocator); - return bun.ByteList.fromList(out); - } - - pub fn setRefOrUnref(this: *FileReader, enable: bool) void { - if 
(this.done) return; - this.reader.updateRef(enable); - } - - fn consumeReaderBuffer(this: *FileReader) void { - if (this.buffered.capacity == 0) { - this.buffered = this.reader.buffer().moveToUnmanaged(); - } - } - - pub fn onReaderDone(this: *FileReader) void { - log("onReaderDone()", .{}); - if (!this.isPulling()) { - this.consumeReaderBuffer(); - if (this.pending.state == .pending) { - if (this.buffered.items.len > 0) { - this.pending.result = .{ .owned_and_done = bun.ByteList.fromList(this.buffered) }; - } else { - this.pending.result = .{ .done = {} }; - } - this.buffered = .{}; - this.pending.run(); - } else if (this.buffered.items.len > 0) { - const this_value = this.parent().this_jsvalue; - const globalThis = this.parent().globalThis; - if (this_value != .zero) { - if (Source.js.onDrainCallbackGetCached(this_value)) |cb| { - const buffered = this.buffered; - this.buffered = .{}; - this.parent().incrementCount(); - defer _ = this.parent().decrementCount(); - this.eventLoop().js.runCallback( - cb, - globalThis, - .undefined, - &.{ - JSC.ArrayBuffer.fromBytes( - buffered.items, - .Uint8Array, - ).toJS( - globalThis, - null, - ), - }, - ); - } - } - } - } - - this.parent().onClose(); - if (this.waiting_for_onReaderDone) { - this.waiting_for_onReaderDone = false; - _ = this.parent().decrementCount(); - } - } - - pub fn onReaderError(this: *FileReader, err: bun.sys.Error) void { - this.consumeReaderBuffer(); - - this.pending.result = .{ .err = .{ .Error = err } }; - this.pending.run(); - } - - pub fn setRawMode(this: *FileReader, flag: bool) bun.sys.Maybe(void) { - if (!Environment.isWindows) { - @panic("FileReader.setRawMode must not be called on " ++ comptime Environment.os.displayString()); - } - return this.reader.setRawMode(flag); - } - - pub fn memoryCost(this: *const FileReader) usize { - // ReadableStreamSource covers @sizeOf(FileReader) - return this.reader.memoryCost() + this.buffered.capacity; - } - - pub const Source = ReadableStreamSource( - @This(), - "File", - onStart, - onPull, - onCancel, - deinit, - setRefOrUnref, - drain, - memoryCost, - null, - ); -}; - -pub const ByteBlobLoader = struct { - offset: Blob.SizeType = 0, - store: ?*Blob.Store = null, - chunk_size: Blob.SizeType = 1024 * 1024 * 2, - remain: Blob.SizeType = 1024 * 1024 * 2, - done: bool = false, - pulled: bool = false, - - pub const tag = ReadableStream.Tag.Blob; - - pub fn parent(this: *@This()) *Source { - return @fieldParentPtr("context", this); - } - - pub fn setup( - this: *ByteBlobLoader, - blob: *const Blob, - user_chunk_size: Blob.SizeType, - ) void { - blob.store.?.ref(); - var blobe = blob.*; - blobe.resolveSize(); - this.* = ByteBlobLoader{ - .offset = blobe.offset, - .store = blobe.store.?, - .chunk_size = @min( - if (user_chunk_size > 0) @min(user_chunk_size, blobe.size) else blobe.size, - 1024 * 1024 * 2, - ), - .remain = blobe.size, - .done = false, + pub fn swap(this: *BufferAction) *JSC.JSPromise { + return switch (this.*) { + inline else => |*promise| promise.swap(), }; } - pub fn onStart(this: *ByteBlobLoader) StreamStart { - return .{ .chunk_size = this.chunk_size }; - } - - pub fn onPull(this: *ByteBlobLoader, buffer: []u8, array: JSC.JSValue) StreamResult { - array.ensureStillAlive(); - defer array.ensureStillAlive(); - this.pulled = true; - const store = this.store orelse return .{ .done = {} }; - if (this.done) { - return .{ .done = {} }; - } - - var temporary = store.sharedView(); - temporary = temporary[@min(this.offset, temporary.len)..]; - - temporary = 
temporary[0..@min(buffer.len, @min(temporary.len, this.remain))]; - if (temporary.len == 0) { - this.clearStore(); - this.done = true; - return .{ .done = {} }; - } - - const copied = @as(Blob.SizeType, @intCast(temporary.len)); - - this.remain -|= copied; - this.offset +|= copied; - bun.assert(buffer.ptr != temporary.ptr); - @memcpy(buffer[0..temporary.len], temporary); - if (this.remain == 0) { - return .{ .into_array_and_done = .{ .value = array, .len = copied } }; - } - - return .{ .into_array = .{ .value = array, .len = copied } }; - } - - pub fn toAnyBlob(this: *ByteBlobLoader, globalThis: *JSC.JSGlobalObject) ?AnyBlob { - if (this.store) |store| { - _ = this.detachStore(); - if (this.offset == 0 and this.remain == store.size()) { - if (store.toAnyBlob()) |blob| { - defer store.deref(); - return blob; - } - } - - var blob = Blob.initWithStore(store, globalThis); - blob.offset = this.offset; - blob.size = this.remain; - this.parent().is_closed = true; - return .{ .Blob = blob }; - } - return null; - } - - pub fn detachStore(this: *ByteBlobLoader) ?*Blob.Store { - if (this.store) |store| { - this.store = null; - this.done = true; - return store; - } - return null; - } - - pub fn onCancel(this: *ByteBlobLoader) void { - this.clearStore(); - } - - pub fn deinit(this: *ByteBlobLoader) void { - this.clearStore(); - this.parent().deinit(); - } - - fn clearStore(this: *ByteBlobLoader) void { - if (this.store) |store| { - this.store = null; - store.deref(); + pub fn deinit(this: *BufferAction) void { + switch (this.*) { + inline else => |*promise| promise.deinit(), } } - - pub fn drain(this: *ByteBlobLoader) bun.ByteList { - const store = this.store orelse return .{}; - var temporary = store.sharedView(); - temporary = temporary[this.offset..]; - temporary = temporary[0..@min(16384, @min(temporary.len, this.remain))]; - - const cloned = bun.ByteList.init(temporary).listManaged(bun.default_allocator).clone() catch bun.outOfMemory(); - this.offset +|= @as(Blob.SizeType, @truncate(cloned.items.len)); - this.remain -|= @as(Blob.SizeType, @truncate(cloned.items.len)); - - return bun.ByteList.fromList(cloned); - } - - pub fn toBufferedValue(this: *ByteBlobLoader, globalThis: *JSC.JSGlobalObject, action: BufferedReadableStreamAction) bun.JSError!JSC.JSValue { - if (this.toAnyBlob(globalThis)) |blob_| { - var blob = blob_; - return blob.toPromise(globalThis, action); - } - - return .zero; - } - - pub fn memoryCost(this: *const ByteBlobLoader) usize { - // ReadableStreamSource covers @sizeOf(FileReader) - if (this.store) |store| { - return store.memoryCost(); - } - return 0; - } - - pub const Source = ReadableStreamSource( - @This(), - "Blob", - onStart, - onPull, - onCancel, - deinit, - null, - drain, - memoryCost, - toBufferedValue, - ); -}; - -pub const PipeFunction = *const fn (ctx: *anyopaque, stream: StreamResult, allocator: std.mem.Allocator) void; - -pub const PathOrFileDescriptor = union(enum) { - path: ZigString.Slice, - fd: bun.FileDescriptor, - - pub fn deinit(this: *const PathOrFileDescriptor) void { - if (this.* == .path) this.path.deinit(); - } -}; - -pub const Pipe = struct { - ctx: ?*anyopaque = null, - onPipe: ?PipeFunction = null, - - pub fn New(comptime Type: type, comptime Function: anytype) type { - return struct { - pub fn pipe(self: *anyopaque, stream: StreamResult, allocator: std.mem.Allocator) void { - Function(@as(*Type, @ptrCast(@alignCast(self))), stream, allocator); - } - - pub fn init(self: *Type) Pipe { - return Pipe{ - .ctx = self, - .onPipe = pipe, - }; - } - }; - } 
-}; - -pub const ByteStream = struct { - buffer: std.ArrayList(u8) = .{ - .allocator = bun.default_allocator, - .items = &.{}, - .capacity = 0, - }, - has_received_last_chunk: bool = false, - pending: StreamResult.Pending = StreamResult.Pending{ - .result = .{ .done = {} }, - }, - done: bool = false, - pending_buffer: []u8 = &.{}, - pending_value: JSC.Strong = .empty, - offset: usize = 0, - highWaterMark: Blob.SizeType = 0, - pipe: Pipe = .{}, - size_hint: Blob.SizeType = 0, - buffer_action: ?BufferAction = null, - - const log = Output.scoped(.ByteStream, false); - - const BufferAction = union(BufferedReadableStreamAction) { - text: JSC.JSPromise.Strong, - arrayBuffer: JSC.JSPromise.Strong, - blob: JSC.JSPromise.Strong, - bytes: JSC.JSPromise.Strong, - json: JSC.JSPromise.Strong, - - pub fn fulfill(this: *BufferAction, global: *JSC.JSGlobalObject, blob: *AnyBlob) void { - blob.wrap(.{ .normal = this.swap() }, global, this.*); - } - - pub fn reject(this: *BufferAction, global: *JSC.JSGlobalObject, err: StreamResult.StreamError) void { - this.swap().reject(global, err.toJSWeak(global)[0]); - } - - pub fn resolve(this: *BufferAction, global: *JSC.JSGlobalObject, result: JSC.JSValue) void { - this.swap().resolve(global, result); - } - - pub fn value(this: *BufferAction) JSC.JSValue { - return switch (this.*) { - inline else => |promise| promise.value(), - }; - } - - pub fn get(this: *BufferAction) *JSC.JSPromise { - return switch (this.*) { - inline else => |promise| promise.get(), - }; - } - - pub fn swap(this: *BufferAction) *JSC.JSPromise { - return switch (this.*) { - inline else => |*promise| promise.swap(), - }; - } - - pub fn deinit(this: *BufferAction) void { - switch (this.*) { - inline else => |*promise| promise.deinit(), - } - } - }; - - pub const tag = ReadableStream.Tag.Bytes; - - pub fn setup(this: *ByteStream) void { - this.* = .{}; - } - - pub fn onStart(this: *@This()) StreamStart { - if (this.has_received_last_chunk and this.buffer.items.len == 0) { - return .{ .empty = {} }; - } - - if (this.has_received_last_chunk) { - return .{ .owned_and_done = bun.ByteList.fromList(this.buffer.moveToUnmanaged()) }; - } - - if (this.highWaterMark == 0) { - return .{ .ready = {} }; - } - - // For HTTP, the maximum streaming response body size will be 512 KB. - // #define LIBUS_RECV_BUFFER_LENGTH 524288 - // For HTTPS, the size is probably quite a bit lower like 64 KB due to TLS transmission. - // We add 1 extra page size so that if there's a little bit of excess buffered data, we avoid extra allocations. 
- const page_size: Blob.SizeType = @intCast(std.heap.pageSize()); - return .{ .chunk_size = @min(512 * 1024 + page_size, @max(this.highWaterMark, page_size)) }; - } - - pub fn value(this: *@This()) JSValue { - const result = this.pending_value.get() orelse { - return .zero; - }; - this.pending_value.clearWithoutDeallocation(); - return result; - } - - pub fn isCancelled(this: *const @This()) bool { - return this.parent().cancelled; - } - - pub fn unpipeWithoutDeref(this: *@This()) void { - this.pipe.ctx = null; - this.pipe.onPipe = null; - } - - pub fn onData( - this: *@This(), - stream: StreamResult, - allocator: std.mem.Allocator, - ) void { - JSC.markBinding(@src()); - if (this.done) { - if (stream.isDone() and (stream == .owned or stream == .owned_and_done)) { - if (stream == .owned) allocator.free(stream.owned.slice()); - if (stream == .owned_and_done) allocator.free(stream.owned_and_done.slice()); - } - this.has_received_last_chunk = stream.isDone(); - - log("ByteStream.onData already done... do nothing", .{}); - - return; - } - - bun.assert(!this.has_received_last_chunk or stream == .err); - this.has_received_last_chunk = stream.isDone(); - - if (this.pipe.ctx) |ctx| { - this.pipe.onPipe.?(ctx, stream, allocator); - return; - } - - const chunk = stream.slice(); - - if (this.buffer_action) |*action| { - if (stream == .err) { - defer { - this.buffer.clearAndFree(); - this.pending.result.deinit(); - this.pending.result = .{ .done = {} }; - this.buffer_action = null; - } - - log("ByteStream.onData err action.reject()", .{}); - - action.reject(this.parent().globalThis, stream.err); - return; - } - - if (this.has_received_last_chunk) { - defer { - this.buffer_action = null; - } - - if (this.buffer.capacity == 0 and stream == .done) { - log("ByteStream.onData done and action.fulfill()", .{}); - - var blob = this.toAnyBlob().?; - action.fulfill(this.parent().globalThis, &blob); - return; - } - if (this.buffer.capacity == 0 and stream == .owned_and_done) { - log("ByteStream.onData owned_and_done and action.fulfill()", .{}); - - this.buffer = std.ArrayList(u8).fromOwnedSlice(bun.default_allocator, @constCast(chunk)); - var blob = this.toAnyBlob().?; - action.fulfill(this.parent().globalThis, &blob); - return; - } - defer { - if (stream == .owned_and_done or stream == .owned) { - allocator.free(stream.slice()); - } - } - log("ByteStream.onData appendSlice and action.fulfill()", .{}); - - this.buffer.appendSlice(chunk) catch bun.outOfMemory(); - var blob = this.toAnyBlob().?; - action.fulfill(this.parent().globalThis, &blob); - - return; - } else { - this.buffer.appendSlice(chunk) catch bun.outOfMemory(); - - if (stream == .owned_and_done or stream == .owned) { - allocator.free(stream.slice()); - } - } - - return; - } - - if (this.pending.state == .pending) { - bun.assert(this.buffer.items.len == 0); - const to_copy = this.pending_buffer[0..@min(chunk.len, this.pending_buffer.len)]; - const pending_buffer_len = this.pending_buffer.len; - bun.assert(to_copy.ptr != chunk.ptr); - @memcpy(to_copy, chunk[0..to_copy.len]); - this.pending_buffer = &.{}; - - const is_really_done = this.has_received_last_chunk and to_copy.len <= pending_buffer_len; - - if (is_really_done) { - this.done = true; - - if (to_copy.len == 0) { - if (stream == .err) { - this.pending.result = .{ - .err = stream.err, - }; - } else { - this.pending.result = .{ - .done = {}, - }; - } - } else { - this.pending.result = .{ - .into_array_and_done = .{ - .value = this.value(), - .len = @as(Blob.SizeType, @truncate(to_copy.len)), - }, 
- }; - } - } else { - this.pending.result = .{ - .into_array = .{ - .value = this.value(), - .len = @as(Blob.SizeType, @truncate(to_copy.len)), - }, - }; - } - - const remaining = chunk[to_copy.len..]; - if (remaining.len > 0 and chunk.len > 0) - this.append(stream, to_copy.len, chunk, allocator) catch @panic("Out of memory while copying request body"); - - log("ByteStream.onData pending.run()", .{}); - - this.pending.run(); - - return; - } - - log("ByteStream.onData no action just append", .{}); - - this.append(stream, 0, chunk, allocator) catch @panic("Out of memory while copying request body"); - } - - pub fn append( - this: *@This(), - stream: StreamResult, - offset: usize, - base_address: []const u8, - allocator: std.mem.Allocator, - ) !void { - const chunk = stream.slice()[offset..]; - - if (this.buffer.capacity == 0) { - switch (stream) { - .owned => |owned| { - this.buffer = owned.listManaged(allocator); - this.offset += offset; - }, - .owned_and_done => |owned| { - this.buffer = owned.listManaged(allocator); - this.offset += offset; - }, - .temporary_and_done, .temporary => { - this.buffer = try std.ArrayList(u8).initCapacity(bun.default_allocator, chunk.len); - this.buffer.appendSliceAssumeCapacity(chunk); - }, - .err => { - this.pending.result = .{ .err = stream.err }; - }, - .done => {}, - else => unreachable, - } - return; - } - - switch (stream) { - .temporary_and_done, .temporary => { - try this.buffer.appendSlice(chunk); - }, - .owned_and_done, .owned => { - try this.buffer.appendSlice(chunk); - allocator.free(@constCast(base_address)); - }, - .err => { - if (this.buffer_action != null) { - @panic("Expected buffer action to be null"); - } - - this.pending.result = .{ .err = stream.err }; - }, - .done => {}, - // We don't support the rest of these yet - else => unreachable, - } - - return; - } - - pub fn setValue(this: *@This(), view: JSC.JSValue) void { - JSC.markBinding(@src()); - this.pending_value.set(this.parent().globalThis, view); - } - - pub fn parent(this: *@This()) *Source { - return @fieldParentPtr("context", this); - } - - pub fn onPull(this: *@This(), buffer: []u8, view: JSC.JSValue) StreamResult { - JSC.markBinding(@src()); - bun.assert(buffer.len > 0); - bun.debugAssert(this.buffer_action == null); - - if (this.buffer.items.len > 0) { - bun.assert(this.value() == .zero); - const to_write = @min( - this.buffer.items.len - this.offset, - buffer.len, - ); - const remaining_in_buffer = this.buffer.items[this.offset..][0..to_write]; - - @memcpy(buffer[0..to_write], this.buffer.items[this.offset..][0..to_write]); - - if (this.offset + to_write == this.buffer.items.len) { - this.offset = 0; - this.buffer.items.len = 0; - } else { - this.offset += to_write; - } - - if (this.has_received_last_chunk and remaining_in_buffer.len == 0) { - this.buffer.clearAndFree(); - this.done = true; - - return .{ - .into_array_and_done = .{ - .value = view, - .len = @as(Blob.SizeType, @truncate(to_write)), - }, - }; - } - - return .{ - .into_array = .{ - .value = view, - .len = @as(Blob.SizeType, @truncate(to_write)), - }, - }; - } - - if (this.has_received_last_chunk) { - return .{ - .done = {}, - }; - } - - this.pending_buffer = buffer; - this.setValue(view); - - return .{ - .pending = &this.pending, - }; - } - - pub fn onCancel(this: *@This()) void { - JSC.markBinding(@src()); - const view = this.value(); - if (this.buffer.capacity > 0) this.buffer.clearAndFree(); - this.done = true; - this.pending_value.deinit(); - - if (view != .zero) { - this.pending_buffer = &.{}; - 
this.pending.result.deinit(); - this.pending.result = .{ .done = {} }; - this.pending.run(); - } - - if (this.buffer_action) |*action| { - const global = this.parent().globalThis; - action.reject(global, .{ .AbortReason = .UserAbort }); - this.buffer_action = null; - } - } - - pub fn memoryCost(this: *const @This()) usize { - // ReadableStreamSource covers @sizeOf(ByteStream) - return this.buffer.capacity; - } - - pub fn deinit(this: *@This()) void { - JSC.markBinding(@src()); - if (this.buffer.capacity > 0) this.buffer.clearAndFree(); - - this.pending_value.deinit(); - if (!this.done) { - this.done = true; - - this.pending_buffer = &.{}; - this.pending.result.deinit(); - this.pending.result = .{ .done = {} }; - this.pending.run(); - } - if (this.buffer_action) |*action| { - action.deinit(); - } - this.parent().deinit(); - } - - pub fn drain(this: *@This()) bun.ByteList { - if (this.buffer.items.len > 0) { - const out = bun.ByteList.fromList(this.buffer); - this.buffer = .{ - .allocator = bun.default_allocator, - .items = &.{}, - .capacity = 0, - }; - - return out; - } - - return .{}; - } - - pub fn toAnyBlob(this: *@This()) ?AnyBlob { - if (this.has_received_last_chunk) { - const buffer = this.buffer; - this.buffer = .{ - .allocator = bun.default_allocator, - .items = &.{}, - .capacity = 0, - }; - this.done = true; - this.pending.result.deinit(); - this.pending.result = .{ .done = {} }; - this.parent().is_closed = true; - return AnyBlob{ - .InternalBlob = JSC.WebCore.InternalBlob{ - .bytes = buffer, - .was_string = false, - }, - }; - } - - return null; - } - - pub fn toBufferedValue(this: *@This(), globalThis: *JSC.JSGlobalObject, action: BufferedReadableStreamAction) bun.JSError!JSC.JSValue { - if (this.buffer_action != null) { - return globalThis.throw("Cannot buffer value twice", .{}); - } - - if (this.pending.result == .err) { - const err, _ = this.pending.result.err.toJSWeak(globalThis); - this.pending.result.deinit(); - this.done = true; - this.buffer.clearAndFree(); - return JSC.JSPromise.dangerouslyCreateRejectedPromiseValueWithoutNotifyingVM(globalThis, err); - } - - if (this.toAnyBlob()) |blob_| { - var blob = blob_; - return blob.toPromise(globalThis, action); - } - - this.buffer_action = switch (action) { - .blob => .{ .blob = JSC.JSPromise.Strong.init(globalThis) }, - .bytes => .{ .bytes = JSC.JSPromise.Strong.init(globalThis) }, - .arrayBuffer => .{ .arrayBuffer = JSC.JSPromise.Strong.init(globalThis) }, - .json => .{ .json = JSC.JSPromise.Strong.init(globalThis) }, - .text => .{ .text = JSC.JSPromise.Strong.init(globalThis) }, - }; - - return this.buffer_action.?.value(); - } - - pub const Source = ReadableStreamSource( - @This(), - "Bytes", - onStart, - onPull, - onCancel, - deinit, - null, - drain, - memoryCost, - toBufferedValue, - ); }; pub const ReadResult = union(enum) { @@ -5395,7 +1687,7 @@ pub const ReadResult = union(enum) { done: void, read: []u8, - pub fn toStream(this: ReadResult, pending: *StreamResult.Pending, buf: []u8, view: JSValue, close_on_empty: bool) StreamResult { + pub fn toStream(this: ReadResult, pending: *Result.Pending, buf: []u8, view: JSValue, close_on_empty: bool) Result { return toStreamWithIsDone( this, pending, @@ -5405,7 +1697,7 @@ pub const ReadResult = union(enum) { false, ); } - pub fn toStreamWithIsDone(this: ReadResult, pending: *StreamResult.Pending, buf: []u8, view: JSValue, close_on_empty: bool, is_done: bool) StreamResult { + pub fn toStreamWithIsDone(this: ReadResult, pending: *Result.Pending, buf: []u8, view: JSValue, 
close_on_empty: bool, is_done: bool) Result { return switch (this) { .pending => .{ .pending = pending }, .err => .{ .err = .{ .Error = this.err } }, @@ -5415,13 +1707,13 @@ pub const ReadResult = union(enum) { const done = is_done or (close_on_empty and slice.len == 0); break :brk if (owned and done) - StreamResult{ .owned_and_done = bun.ByteList.init(slice) } + Result{ .owned_and_done = bun.ByteList.init(slice) } else if (owned) - StreamResult{ .owned = bun.ByteList.init(slice) } + Result{ .owned = bun.ByteList.init(slice) } else if (done) - StreamResult{ .into_array_and_done = .{ .len = @as(Blob.SizeType, @truncate(slice.len)), .value = view } } + Result{ .into_array_and_done = .{ .len = @as(Blob.SizeType, @truncate(slice.len)), .value = view } } else - StreamResult{ .into_array = .{ .len = @as(Blob.SizeType, @truncate(slice.len)), .value = view } }; + Result{ .into_array = .{ .len = @as(Blob.SizeType, @truncate(slice.len)), .value = view } }; }, }; } @@ -5447,108 +1739,50 @@ pub const AutoSizer = struct { } }; -// Linux default pipe size is 16 pages of memory -const default_fifo_chunk_size = 64 * 1024; -const default_file_chunk_size = 1024 * 1024 * 2; +const std = @import("std"); +const Api = @import("../../api/schema.zig").Api; +const bun = @import("bun"); +const MimeType = HTTPClient.MimeType; +const ZigURL = @import("../../url.zig").URL; +const HTTPClient = bun.http; +const JSC = bun.JSC; -pub fn NewReadyWatcher( - comptime Context: type, - comptime flag_: Async.FilePoll.Flags, - comptime onReady: anytype, -) type { - return struct { - const flag = flag_; - const ready = onReady; +const Method = @import("../../http/method.zig").Method; +const FetchHeaders = WebCore.FetchHeaders; +const ObjectPool = @import("../../pool.zig").ObjectPool; +const SystemError = JSC.SystemError; +const Output = bun.Output; +const MutableString = bun.MutableString; +const strings = bun.strings; +const string = bun.string; +const default_allocator = bun.default_allocator; +const FeatureFlags = bun.FeatureFlags; +const ArrayBuffer = JSC.ArrayBuffer; +const Async = bun.Async; - const Watcher = @This(); +const Environment = bun.Environment; +const ZigString = JSC.ZigString; +const IdentityContext = bun.IdentityContext; +const JSInternalPromise = JSC.JSInternalPromise; +const JSPromise = JSC.JSPromise; +const JSValue = JSC.JSValue; +const JSGlobalObject = JSC.JSGlobalObject; +const E = bun.sys.E; +const VirtualMachine = JSC.VirtualMachine; +const Task = JSC.Task; +const JSPrinter = bun.js_printer; +const picohttp = bun.picohttp; +const StringJoiner = bun.StringJoiner; +const uws = bun.uws; +const Blob = bun.webcore.Blob; +const Response = JSC.WebCore.Response; +const Request = JSC.WebCore.Request; +const assert = bun.assert; +const Syscall = bun.sys; +const uv = bun.windows.libuv; +const WebCore = JSC.WebCore; +const Sink = WebCore.Sink; +const AutoFlusher = WebCore.AutoFlusher; +const FileSink = WebCore.FileSink; - pub inline fn isFIFO(this: *const Context) bool { - if (comptime @hasField(Context, "is_fifo")) { - return this.is_fifo; - } - - if (this.poll_ref) |_poll_ref| { - return _poll_ref.flags.contains(.fifo); - } - - if (comptime @hasField(Context, "mode")) { - return bun.S.ISFIFO(this.mode); - } - - return false; - } - - pub fn onPoll(this: *Context, sizeOrOffset: i64, _: u16) void { - defer JSC.VirtualMachine.get().drainMicrotasks(); - ready(this, sizeOrOffset); - } - - pub fn unwatch(this: *Context, fd_: anytype) void { - if (comptime Environment.isWindows) { - @panic("TODO on Windows"); - } - - 
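NewReadyWatcher, whose body continues below, follows the same comptime shape as ReadableStreamSource earlier in this file: a function takes a Context type plus callbacks and returns a struct specialized for that context. A stripped-down sketch of the pattern with made-up names, not Bun's actual API:

fn NewCounter(comptime Context: type, comptime onTick: fn (*Context) void) type {
    return struct {
        ticks: u32 = 0,

        /// Bump the counter, then hand control back to the wrapped context.
        pub fn tick(self: *@This(), ctx: *Context) void {
            self.ticks += 1;
            onTick(ctx);
        }
    };
}

Instantiating it looks like `const Watcher = NewCounter(MyContext, MyContext.onTick);`, which is the same call shape `NewReadyWatcher(Context, flag, onReady)` relies on.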
bun.assert(this.poll_ref.?.fd == fd_); - bun.assert( - this.poll_ref.?.unregister(JSC.VirtualMachine.get().event_loop_handle.?, false) == .result, - ); - this.poll_ref.?.disableKeepingProcessAlive(JSC.VirtualMachine.get()); - } - - pub fn pollRef(this: *Context) *Async.FilePoll { - return this.poll_ref orelse brk: { - this.poll_ref = Async.FilePoll.init( - JSC.VirtualMachine.get(), - this.fd, - .{}, - Context, - this, - ); - break :brk this.poll_ref.?; - }; - } - - pub fn isWatching(this: *const Context) bool { - if (this.poll_ref) |poll| { - return poll.flags.contains(flag.poll()) and !poll.flags.contains(.needs_rearm); - } - - return false; - } - - pub fn watch(this: *Context, fd: bun.FileDescriptor) void { - if (comptime Environment.isWindows) { - @panic("Do not call watch() on windows"); - } - var poll_ref: *Async.FilePoll = this.poll_ref orelse brk: { - this.poll_ref = Async.FilePoll.init( - JSC.VirtualMachine.get(), - fd, - .{}, - Context, - this, - ); - break :brk this.poll_ref.?; - }; - bun.assert(poll_ref.fd == fd); - bun.assert(!this.isWatching()); - switch (poll_ref.register(JSC.VirtualMachine.get().event_loop_handle.?, flag, true)) { - .err => |err| { - std.debug.panic("FilePoll.register failed: {d}", .{err.errno}); - }, - .result => {}, - } - } - }; -} -// pub const HTTPRequest = RequestBodyStreamer(false); -// pub const HTTPSRequest = RequestBodyStreamer(true); -// pub fn ResponseBodyStreamer(comptime is_ssl: bool) type { -// return struct { -// const Streamer = @This(); -// pub fn onEnqueue(this: *Streamer, buffer: []u8, ): anytype, -// pub fn onEnqueueMany(this: *Streamer): anytype, -// pub fn onClose(this: *Streamer): anytype, -// pub fn onError(this: *Streamer): anytype, -// }; -// } +const AnyBlob = bun.webcore.Blob.Any; diff --git a/src/bun.zig b/src/bun.zig index 5b83f866d1..c231f30c1b 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -88,19 +88,21 @@ pub const JSError = error{ pub const JSOOM = OOM || JSError; -pub const detectCI = @import("./ci_info.zig").detectCI; +pub const detectCI = @import("ci_info.zig").detectCI; + +/// Cross-platform system APIs +pub const sys = @import("sys.zig"); +/// Deprecated: use bun.sys.S +pub const S = sys.S; +pub const O = sys.O; +pub const Mode = sys.Mode; + +// Platform-specific system APIs. If something can be implemented on multiple +// platforms, it does not belong in these three namespaces. +pub const windows = @import("windows.zig"); +pub const darwin = @import("darwin.zig"); +pub const linux = @import("linux.zig"); -/// Adding to this namespace is considered deprecated. -/// -/// If the declaration truly came from C, it should be perfectly possible to -/// translate the definition and put it in `c-headers-for-zig.h`, and available -/// via the lowercase `c` namespace. Wrappers around functions should go in a -/// more specific namespace, such as `bun.spawn`, `bun.strings` or `bun.sys` -/// -/// By avoiding manual transcription of C headers into Zig, we avoid bugs due to -/// different definitions between platforms, as well as very common mistakes -/// that can be made when porting definitions. It also keeps code much cleaner. -pub const C = @import("c.zig"); /// Translated from `c-headers-for-zig.h` for the current platform. 
pub const c = @import("translated-c-headers"); @@ -116,9 +118,15 @@ pub const fmt = @import("./fmt.zig"); pub const allocators = @import("./allocators.zig"); pub const bun_js = @import("./bun_js.zig"); +// This file is generated, but can't be placed in the build/debug/codegen +// folder because zig will complain about outside-of-module stuff /// All functions and interfaces provided from Bun's `bindgen` utility. pub const gen = @import("bun.js/bindings/GeneratedBindings.zig"); + comptime { + // This file is generated, but can't be placed in the build/debug/codegen + // folder because zig will complain about outside-of-module stuff + _ = &@import("bun.js/bindings/GeneratedJS2Native.zig"); _ = &gen; // reference bindings } @@ -387,9 +395,6 @@ pub fn clone(item: anytype, allocator: std.mem.Allocator) !@TypeOf(item) { pub const StringBuilder = @import("./string.zig").StringBuilder; pub const LinearFifo = @import("./linear_fifo.zig").LinearFifo; -pub const linux = struct { - pub const memfd_allocator = @import("./allocators/linux_memfd_allocator.zig").LinuxMemFdAllocator; -}; /// hash a string pub fn hash(content: []const u8) u64 { @@ -623,7 +628,16 @@ pub const invalid_fd: FileDescriptor = .invalid; pub const simdutf = @import("./bun.js/bindings/bun-simdutf.zig"); -pub const JSC = @import("jsc.zig"); +/// Deprecated: Prefer the lowercase `jsc` since it is a namespace and not a struct. +pub const JSC = jsc; + +/// Bindings to JavaScriptCore and other JavaScript primitives. +/// Web and runtime-specific APIs should go in `webcore` and `api`. +pub const jsc = @import("bun.js/jsc.zig"); +/// JavaScript Web APIs +pub const webcore = @import("bun.js/webcore.zig"); +/// "api" in this context means "the Bun APIs", as in "the exposed JS APIs" +pub const api = @import("bun.js/api.zig"); pub const logger = @import("./logger.zig"); pub const ThreadPool = @import("./thread_pool.zig"); @@ -978,7 +992,7 @@ pub fn parseDouble(input: []const u8) !f64 { if (comptime Environment.isWasm) { return try std.fmt.parseFloat(f64, input); } - return JSC.WTF.parseDouble(input); + return JSC.wtf.parseDouble(input); } pub const SignalCode = enum(u8) { @@ -2002,13 +2016,6 @@ pub fn isRegularFile(mode: anytype) bool { return S.ISREG(@intCast(mode)); } -pub const sys = @import("./sys.zig"); -pub const O = sys.O; - -pub const Mode = C.Mode; - -pub const windows = @import("./windows.zig"); - pub const LazyBoolValue = enum { unknown, no, @@ -2556,12 +2563,12 @@ pub const io = @import("./io/io.zig"); const errno_map = errno_map: { var max_value = 0; - for (std.enums.values(C.SystemErrno)) |v| + for (std.enums.values(sys.SystemErrno)) |v| max_value = @max(max_value, @intFromEnum(v)); var map: [max_value + 1]anyerror = undefined; @memset(&map, error.Unexpected); - for (std.enums.values(C.SystemErrno)) |v| + for (std.enums.values(sys.SystemErrno)) |v| map[@intFromEnum(v)] = @field(anyerror, @tagName(v)); break :errno_map map; @@ -2592,8 +2599,6 @@ pub fn errnoToZigErr(err: anytype) anyerror { return error.Unexpected; } -pub const S = if (Environment.isWindows) C.S else std.posix.S; - pub const brotli = @import("./brotli.zig"); pub fn iterateDir(dir: std.fs.Dir) DirIterator.Iterator { @@ -3206,7 +3211,7 @@ pub fn memmove(output: []u8, input: []const u8) void { } if (Environment.isNative and !@inComptime()) { - C.memmove(output.ptr, input.ptr, input.len); + _ = c.memmove(output.ptr, input.ptr, input.len); } else { for (input, output) |input_byte, *out| { out.* = input_byte; @@ -3596,3 +3601,14 @@ pub const server =
@import("./bun.js/api/server.zig"); pub const macho = @import("./macho.zig"); pub const valkey = @import("./valkey/index.zig"); pub const highway = @import("./highway.zig"); + +pub const MemoryReportingAllocator = @import("allocators/MemoryReportingAllocator.zig"); + +pub fn move(dest: []u8, src: []const u8) void { + if (comptime Environment.allow_assert) { + if (src.len != dest.len) { + bun.Output.panic("Move: src.len != dest.len, {d} != {d}", .{ src.len, dest.len }); + } + } + _ = bun.c.memmove(dest.ptr, src.ptr, src.len); +} diff --git a/src/bun_js.zig b/src/bun_js.zig index 06b57de6f7..f1ee9062fd 100644 --- a/src/bun_js.zig +++ b/src/bun_js.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const lex = bun.js_lexer; @@ -322,8 +322,8 @@ pub const Run = struct { } switch (this.ctx.debug.hot_reload) { - .hot => JSC.HotReloader.enableHotModuleReloading(vm), - .watch => JSC.WatchReloader.enableHotModuleReloading(vm), + .hot => JSC.hot_reloader.HotReloader.enableHotModuleReloading(vm), + .watch => JSC.hot_reloader.WatchReloader.enableHotModuleReloading(vm), else => {}, } @@ -476,7 +476,7 @@ pub const Run = struct { ); } - JSC.napi.fixDeadCodeElimination(); + bun.api.napi.fixDeadCodeElimination(); bun.crash_handler.fixDeadCodeElimination(); vm.globalExit(); } diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig index b25f460153..c0c78a927d 100644 --- a/src/bundler/bundle_v2.zig +++ b/src/bundler/bundle_v2.zig @@ -53,7 +53,7 @@ const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const StoredFileDescriptorType = bun.StoredFileDescriptorType; const FeatureFlags = bun.FeatureFlags; -const C = bun.C; + const std = @import("std"); const lex = @import("../js_lexer.zig"); const Logger = @import("../logger.zig"); @@ -408,7 +408,7 @@ pub const ThreadPool = struct { }; }; -const Watcher = bun.JSC.NewHotReloader(BundleV2, EventLoop, true); +const Watcher = bun.JSC.hot_reloader.NewHotReloader(BundleV2, EventLoop, true); /// This assigns a concise, predictable, and unique `.pretty` attribute to a Path. /// DevServer relies on pretty paths for identifying modules, so they must be unique. @@ -1637,7 +1637,7 @@ pub const BundleV2 = struct { if (path.len > 0 and // Check for either node or bun builtins // We don't use the list from .bun because that includes third-party packages in some cases. - !JSC.HardcodedModule.Alias.has(path, .node) and + !JSC.ModuleLoader.HardcodedModule.Alias.has(path, .node) and !strings.hasPrefixComptime(path, "bun:") and !strings.eqlComptime(path, "bun")) { @@ -2999,7 +2999,7 @@ pub const BundleV2 = struct { } if (ast.target.isBun()) { - if (JSC.HardcodedModule.Alias.get(import_record.path.text, .bun)) |replacement| { + if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record.path.text, .bun)) |replacement| { // When bundling node builtins, remove the "node:" prefix. 
// This supports special use cases where the bundle is put // into a non-node module resolver that doesn't support @@ -15252,11 +15252,11 @@ pub const LinkerContext = struct { code_result.buffer = buf.items; } - switch (JSC.Node.NodeFS.writeFileWithPathBuffer( + switch (JSC.Node.fs.NodeFS.writeFileWithPathBuffer( &pathbuf, - JSC.Node.Arguments.WriteFile{ + .{ .data = JSC.Node.StringOrBuffer{ - .buffer = JSC.Buffer{ + .buffer = bun.api.node.Buffer{ .buffer = .{ .ptr = @constCast(output_source_map.ptr), // TODO: handle > 4 GB files @@ -15268,8 +15268,8 @@ pub const LinkerContext = struct { .encoding = .buffer, .dirfd = .fromStdDir(root_dir), .file = .{ - .path = JSC.Node.PathLike{ - .string = JSC.PathString.init(source_map_final_rel_path), + .path = .{ + .string = bun.PathString.init(source_map_final_rel_path), }, }, }, @@ -15343,11 +15343,11 @@ pub const LinkerContext = struct { @memcpy(fdpath[0..chunk.final_rel_path.len], chunk.final_rel_path); fdpath[chunk.final_rel_path.len..][0..bun.bytecode_extension.len].* = bun.bytecode_extension.*; defer cached_bytecode.deref(); - switch (JSC.Node.NodeFS.writeFileWithPathBuffer( + switch (JSC.Node.fs.NodeFS.writeFileWithPathBuffer( &pathbuf, - JSC.Node.Arguments.WriteFile{ - .data = JSC.Node.StringOrBuffer{ - .buffer = JSC.Buffer{ + .{ + .data = .{ + .buffer = .{ .buffer = .{ .ptr = @constCast(bytecode.ptr), .len = @as(u32, @truncate(bytecode.len)), @@ -15360,8 +15360,8 @@ pub const LinkerContext = struct { .dirfd = .fromStdDir(root_dir), .file = .{ - .path = JSC.Node.PathLike{ - .string = JSC.PathString.init(fdpath[0 .. chunk.final_rel_path.len + bun.bytecode_extension.len]), + .path = .{ + .string = bun.PathString.init(fdpath[0 .. chunk.final_rel_path.len + bun.bytecode_extension.len]), }, }, }, @@ -15399,11 +15399,11 @@ pub const LinkerContext = struct { break :brk null; }; - switch (JSC.Node.NodeFS.writeFileWithPathBuffer( + switch (JSC.Node.fs.NodeFS.writeFileWithPathBuffer( &pathbuf, - JSC.Node.Arguments.WriteFile{ - .data = JSC.Node.StringOrBuffer{ - .buffer = JSC.Buffer{ + .{ + .data = .{ + .buffer = .{ .buffer = .{ .ptr = @constCast(code_result.buffer.ptr), // TODO: handle > 4 GB files @@ -15418,7 +15418,7 @@ pub const LinkerContext = struct { .dirfd = .fromStdDir(root_dir), .file = .{ .path = JSC.Node.PathLike{ - .string = JSC.PathString.init(rel_path), + .string = bun.PathString.init(rel_path), }, }, }, @@ -15519,11 +15519,11 @@ pub const LinkerContext = struct { } } - switch (JSC.Node.NodeFS.writeFileWithPathBuffer( + switch (JSC.Node.fs.NodeFS.writeFileWithPathBuffer( &pathbuf, - JSC.Node.Arguments.WriteFile{ - .data = JSC.Node.StringOrBuffer{ - .buffer = JSC.Buffer{ + .{ + .data = .{ + .buffer = .{ .buffer = .{ .ptr = @constCast(bytes.ptr), .len = @as(u32, @truncate(bytes.len)), @@ -15535,7 +15535,7 @@ pub const LinkerContext = struct { .dirfd = .fromStdDir(root_dir), .file = .{ .path = JSC.Node.PathLike{ - .string = JSC.PathString.init(src.dest_path), + .string = bun.PathString.init(src.dest_path), }, }, }, @@ -16000,7 +16000,7 @@ pub const LinkerContext = struct { // "undefined" instead of emitting an error. 
symbol.import_item_status = .missing; - if (c.resolver.opts.target == .browser and JSC.HardcodedModule.Alias.has(next_source.path.pretty, .bun)) { + if (c.resolver.opts.target == .browser and JSC.ModuleLoader.HardcodedModule.Alias.has(next_source.path.pretty, .bun)) { c.log.addRangeWarningFmtWithNote( source, r, diff --git a/src/bunfig.zig b/src/bunfig.zig index 9acd0d1c8e..ce984b35e0 100644 --- a/src/bunfig.zig +++ b/src/bunfig.zig @@ -9,7 +9,7 @@ const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const URL = @import("./url.zig").URL; -const C = bun.C; + const options = @import("./options.zig"); const logger = bun.logger; const js_ast = bun.JSAst; diff --git a/src/c-headers-for-zig.h b/src/c-headers-for-zig.h index 6e791455d6..90e2998161 100644 --- a/src/c-headers-for-zig.h +++ b/src/c-headers-for-zig.h @@ -4,6 +4,10 @@ // into Zig code. By using automatic translation, differences // between platforms and subtle mistakes can be avoided. // +// One way to locate a definition for a given symbol is to open +// Zig's `lib` directory and run ripgrep on it. For example, +// `sockaddr_dl` is in `libc/include/any-macos-any/net/if_dl.h` +// // When Zig is translating this file, it will define these macros: // - WINDOWS // - DARWIN @@ -19,31 +23,49 @@ #include "../packages/bun-native-bundler-plugin-api/bundler_plugin.h" #if POSIX -#include "pwd.h" -#include +#include #include +#include +#include +#include +#include #endif #if DARWIN -#include -#include -#include +#include +#include +#include +#include +#include +#include #include +#include #include -#include #include -#elif LINUX -#include #include -#include -#include -#include +#include +#include +#elif LINUX #include -#include #include +#include +#include +#include +#include +#include +#include #endif #if WINDOWS #include #include #endif + +#if WINDOWS +#include +#include +#endif + +#undef lstat +#undef fstat +#undef stat diff --git a/src/c.zig b/src/c.zig deleted file mode 100644 index ca9bba572d..0000000000 --- a/src/c.zig +++ /dev/null @@ -1,517 +0,0 @@ -//! Adding to this namespace is considered deprecated. -//! -//! If the declaration truly came from C, it should be perfectly possible to -//! translate the definition and put it in `c-headers-for-zig.h`, and available -//! via the lowercase `c` namespace. Wrappers around functions should go in a -//! more specific namespace, such as `bun.spawn`, `bun.strings` or `bun.sys` -//! -//! By avoiding manual transcription of C headers into Zig, we avoid bugs due to -//! different definitions between platforms, as well as very common mistakes -//! that can be made when porting definitions. It also keeps code much cleaner. 
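The doc comment above spells out the rule this deletion enforces. As a concrete, hedged illustration of the migration it describes: a hand-written prototype such as the `memmove` extern removed further down is dropped in favor of the symbol from the translated headers, which is how the `bun.zig` changes in this patch already call it.

const bun = @import("bun");

// Before: a manually transcribed prototype lived in this file (removed below).
// pub extern fn memmove(dest: [*]u8, src: [*]const u8, n: usize) void;

// After: reach the libc symbol through the translated `c-headers-for-zig.h`
// namespace instead. Sketch only; the real call sites are bun.memmove()/bun.move().
fn moveBytes(dest: []u8, src: []const u8) void {
    bun.assert(dest.len == src.len);
    _ = bun.c.memmove(dest.ptr, src.ptr, src.len);
}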
-const std = @import("std"); -const bun = @import("bun"); -const Environment = @import("./env.zig"); - -const translated = @import("translated-c-headers"); - -const PlatformSpecific = switch (Environment.os) { - .mac => @import("darwin_c.zig"), - .linux => @import("linux_c.zig"), - .windows => @import("windows_c.zig"), - else => struct {}, -}; -pub usingnamespace PlatformSpecific; - -const C = std.c; -const builtin = @import("builtin"); -const posix = std.posix; -const mem = std.mem; -const Stat = std.fs.File.Stat; -const Kind = std.fs.File.Kind; -const StatError = std.fs.File.StatError; -const errno = posix.errno; -const mode_t = bun.Mode; -// TODO: this is wrong on Windows -const libc_stat = bun.Stat; - -const zeroes = mem.zeroes; -pub const darwin = @import("./darwin_c.zig"); -pub const linux = @import("./linux_c.zig"); -pub extern "c" fn chmod([*c]const u8, mode_t) c_int; -pub extern "c" fn fchmod(std.c.fd_t, mode_t) c_int; -pub extern "c" fn fchmodat(c_int, [*c]const u8, mode_t, c_int) c_int; -pub extern "c" fn fchown(std.c.fd_t, std.c.uid_t, std.c.gid_t) c_int; -pub extern "c" fn lchown(path: [*:0]const u8, std.c.uid_t, std.c.gid_t) c_int; -pub extern "c" fn chown(path: [*:0]const u8, std.c.uid_t, std.c.gid_t) c_int; -pub extern "c" fn lchmod(path: [*:0]const u8, mode: mode_t) c_int; -pub extern "c" fn truncate([*:0]const u8, i64) c_int; // note: truncate64 is not a thing - -pub extern "c" fn lutimes(path: [*:0]const u8, times: *const [2]std.posix.timeval) c_int; -pub extern "c" fn mkdtemp(template: [*c]u8) ?[*:0]u8; - -pub extern "c" fn memcmp(s1: [*c]const u8, s2: [*c]const u8, n: usize) c_int; -pub extern "c" fn memchr(s: [*]const u8, c: u8, n: usize) ?[*]const u8; - -pub extern "c" fn strchr(str: [*]const u8, char: u8) ?[*]const u8; - -pub fn lstat_absolute(path: [:0]const u8) !Stat { - if (builtin.os.tag == .windows) { - @compileError("Not implemented yet, consider using bun.sys.lstat()"); - } - - var st = zeroes(libc_stat); - switch (errno(bun.C.lstat(path.ptr, &st))) { - .SUCCESS => {}, - .NOENT => return error.FileNotFound, - // .EINVAL => unreachable, - .BADF => unreachable, // Always a race condition. 
- .NOMEM => return error.SystemResources, - .ACCES => return error.AccessDenied, - else => |err| return posix.unexpectedErrno(err), - } - - const atime = st.atime(); - const mtime = st.mtime(); - const ctime = st.ctime(); - return Stat{ - .inode = st.ino, - .size = @as(u64, @bitCast(st.size)), - .mode = st.mode, - .kind = switch (builtin.os.tag) { - .wasi => switch (st.filetype) { - posix.FILETYPE_BLOCK_DEVICE => Kind.block_device, - posix.FILETYPE_CHARACTER_DEVICE => Kind.character_device, - posix.FILETYPE_DIRECTORY => Kind.directory, - posix.FILETYPE_SYMBOLIC_LINK => Kind.sym_link, - posix.FILETYPE_REGULAR_FILE => Kind.file, - posix.FILETYPE_SOCKET_STREAM, posix.FILETYPE_SOCKET_DGRAM => Kind.unix_domain_socket, - else => Kind.unknown, - }, - else => switch (st.mode & posix.S.IFMT) { - posix.S.IFBLK => Kind.block_device, - posix.S.IFCHR => Kind.character_device, - posix.S.IFDIR => Kind.directory, - posix.S.IFIFO => Kind.named_pipe, - posix.S.IFLNK => Kind.sym_link, - posix.S.IFREG => Kind.file, - posix.S.IFSOCK => Kind.unix_domain_socket, - else => Kind.unknown, - }, - }, - .atime = @as(i128, atime.sec) * std.time.ns_per_s + atime.nsec, - .mtime = @as(i128, mtime.sec) * std.time.ns_per_s + mtime.nsec, - .ctime = @as(i128, ctime.sec) * std.time.ns_per_s + ctime.nsec, - }; -} - -// renameatZ fails when renaming across mount points -// we assume that this is relatively uncommon -pub fn moveFileZ(from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) !void { - switch (bun.sys.renameatConcurrentlyWithoutFallback(from_dir, filename, to_dir, destination)) { - .err => |err| { - // allow over-writing an empty directory - if (err.getErrno() == .ISDIR) { - _ = bun.sys.rmdirat(to_dir, destination.ptr); - try bun.sys.renameat(from_dir, filename, to_dir, destination).unwrap(); - return; - } - - if (err.getErrno() == .XDEV) { - try moveFileZSlow(from_dir, filename, to_dir, destination); - } else { - return bun.errnoToZigErr(err.errno); - } - }, - .result => {}, - } -} - -pub fn moveFileZWithHandle(from_handle: bun.FileDescriptor, from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) !void { - switch (bun.sys.renameat(from_dir, filename, to_dir, destination)) { - .err => |err| { - // allow over-writing an empty directory - if (err.getErrno() == .ISDIR) { - _ = bun.sys.rmdirat(to_dir, destination.ptr); - - try (bun.sys.renameat(from_dir, filename, to_dir, destination).unwrap()); - return; - } - - if (err.getErrno() == .XDEV) { - try copyFileZSlowWithHandle(from_handle, to_dir, destination).unwrap(); - _ = bun.sys.unlinkat(from_dir, filename); - } - - return bun.errnoToZigErr(err.errno); - }, - .result => {}, - } -} - -const Maybe = bun.sys.Maybe; - -// On Linux, this will be fast because sendfile() supports copying between two file descriptors on disk -// macOS & BSDs will be slow because -pub fn moveFileZSlow(from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) !void { - return try moveFileZSlowMaybe(from_dir, filename, to_dir, destination).unwrap(); -} - -pub fn moveFileZSlowMaybe(from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) Maybe(void) { - const in_handle = switch (bun.sys.openat(from_dir, filename, bun.O.RDONLY | bun.O.CLOEXEC, if (Environment.isWindows) 0 else 0o644)) { - .result => |f| f, - .err => |e| return .{ .err = e }, - }; - defer in_handle.close(); - _ = 
from_dir.unlinkat(filename); - return copyFileZSlowWithHandle(in_handle, to_dir, destination); -} - -pub fn copyFileZSlowWithHandle(in_handle: bun.FileDescriptor, to_dir: bun.FileDescriptor, destination: [:0]const u8) Maybe(void) { - if (comptime Environment.isWindows) { - var buf0: bun.WPathBuffer = undefined; - var buf1: bun.WPathBuffer = undefined; - - const dest = switch (bun.sys.normalizePathWindows(u8, to_dir, destination, &buf0, .{})) { - .result => |x| x, - .err => |e| return .{ .err = e }, - }; - const src_len = bun.windows.GetFinalPathNameByHandleW(in_handle.cast(), &buf1, buf1.len, 0); - if (src_len == 0) { - return Maybe(void).errno(bun.C.E.BUSY, .GetFinalPathNameByHandle); - } else if (src_len >= buf1.len) { - return Maybe(void).errno(bun.C.E.NAMETOOLONG, .GetFinalPathNameByHandle); - } - const src = buf1[0..src_len :0]; - return bun.copyFile(src, dest); - } else { - const stat_ = switch (bun.sys.fstat(in_handle)) { - .result => |s| s, - .err => |e| return .{ .err = e }, - }; - - // Attempt to delete incase it already existed. - // This fixes ETXTBUSY on Linux - _ = bun.sys.unlinkat(to_dir, destination); - - const out_handle = switch (bun.sys.openat( - to_dir, - destination, - bun.O.WRONLY | bun.O.CREAT | bun.O.CLOEXEC | bun.O.TRUNC, - if (comptime Environment.isPosix) 0o644 else 0, - )) { - .result => |fd| fd, - .err => |e| return .{ .err = e }, - }; - defer out_handle.close(); - - if (comptime Environment.isLinux) { - _ = std.os.linux.fallocate(out_handle.cast(), 0, 0, @intCast(stat_.size)); - } - - switch (bun.copyFile(in_handle, out_handle)) { - .err => |e| return .{ .err = e }, - .result => {}, - } - - if (comptime Environment.isPosix) { - _ = fchmod(out_handle.cast(), stat_.mode); - _ = fchown(out_handle.cast(), stat_.uid, stat_.gid); - } - - return Maybe(void).success; - } -} - -pub fn kindFromMode(mode: mode_t) std.fs.File.Kind { - return switch (mode & bun.S.IFMT) { - bun.S.IFBLK => std.fs.File.Kind.block_device, - bun.S.IFCHR => std.fs.File.Kind.character_device, - bun.S.IFDIR => std.fs.File.Kind.directory, - bun.S.IFIFO => std.fs.File.Kind.named_pipe, - bun.S.IFLNK => std.fs.File.Kind.sym_link, - bun.S.IFREG => std.fs.File.Kind.file, - bun.S.IFSOCK => std.fs.File.Kind.unix_domain_socket, - else => .unknown, - }; -} - -pub fn getSelfExeSharedLibPaths(allocator: std.mem.Allocator) error{OutOfMemory}![][:0]u8 { - const List = std.ArrayList([:0]u8); - switch (builtin.os.tag) { - .linux, - .freebsd, - .netbsd, - .dragonfly, - .openbsd, - .solaris, - => { - var paths = List.init(allocator); - errdefer { - const slice = paths.toOwnedSlice() catch &.{}; - for (slice) |item| { - allocator.free(item); - } - allocator.free(slice); - } - try posix.dl_iterate_phdr(&paths, error{OutOfMemory}, struct { - fn callback(info: *posix.dl_phdr_info, size: usize, list: *List) !void { - _ = size; - const name = info.dlpi_name orelse return; - if (name[0] == '/') { - const item = try list.allocator.dupeZ(u8, mem.sliceTo(name, 0)); - errdefer list.allocator.free(item); - try list.append(item); - } - } - }.callback); - return try paths.toOwnedSlice(); - }, - .macos, .ios, .watchos, .tvos => { - var paths = List.init(allocator); - errdefer { - const slice = paths.toOwnedSlice() catch &.{}; - for (slice) |item| { - allocator.free(item); - } - allocator.free(slice); - } - const img_count = std.c._dyld_image_count(); - for (0..img_count) |i| { - const name = std.c._dyld_get_image_name(i); - const item = try allocator.dupeZ(u8, mem.sliceTo(name, 0)); - errdefer allocator.free(item); - try 
paths.append(item); - } - return try paths.toOwnedSlice(); - }, - // revisit if Haiku implements dl_iterat_phdr (https://dev.haiku-os.org/ticket/15743) - .haiku => { - var paths = List.init(allocator); - errdefer { - const slice = paths.toOwnedSlice() catch &.{}; - for (slice) |item| { - allocator.free(item); - } - allocator.free(slice); - } - - const b = "/boot/system/runtime_loader"; - const item = try allocator.dupeZ(u8, mem.sliceTo(b, 0)); - errdefer allocator.free(item); - try paths.append(item); - - return try paths.toOwnedSlice(); - }, - else => @compileError("getSelfExeSharedLibPaths unimplemented for this target"), - } -} - -/// The madvise() system call allows a process that has knowledge of its mem-ory memory -/// ory behavior to describe it to the system. The advice passed in may be -/// used by the system to alter its virtual memory paging strategy. This -/// advice may improve application and system performance. The behavior -/// specified in advice can only be one of the following values: -/// -/// MADV_NORMAL Indicates that the application has no advice to give on -/// its behavior in the specified address range. This is -/// the system default behavior. This is used with -/// madvise() system call. -/// -/// POSIX_MADV_NORMAL -/// Same as MADV_NORMAL but used with posix_madvise() system -/// call. -/// -/// MADV_SEQUENTIAL Indicates that the application expects to access this -/// address range in a sequential manner. This is used with -/// madvise() system call. -/// -/// POSIX_MADV_SEQUENTIAL -/// Same as MADV_SEQUENTIAL but used with posix_madvise() -/// system call. -/// -/// MADV_RANDOM Indicates that the application expects to access this -/// address range in a random manner. This is used with -/// madvise() system call. -/// -/// POSIX_MADV_RANDOM -/// Same as MADV_RANDOM but used with posix_madvise() system -/// call. -/// -/// MADV_WILLNEED Indicates that the application expects to access this -/// address range soon. This is used with madvise() system -/// call. -/// -/// POSIX_MADV_WILLNEED -/// Same as MADV_WILLNEED but used with posix_madvise() sys-tem system -/// tem call. -/// -/// MADV_DONTNEED Indicates that the application is not expecting to -/// access this address range soon. This is used with -/// madvise() system call. -/// -/// POSIX_MADV_DONTNEED -/// Same as MADV_DONTNEED but used with posix_madvise() sys-tem system -/// tem call. -/// -/// MADV_FREE Indicates that the application will not need the information -/// contained in this address range, so the pages may -/// be reused right away. The address range will remain -/// valid. This is used with madvise() system call. -/// -/// The posix_madvise() behaves same as madvise() except that it uses values -/// with POSIX_ prefix for the advice system call argument. 
-pub extern "c" fn posix_madvise(ptr: *anyopaque, len: usize, advice: i32) c_int; - -pub fn setProcessPriority(pid: i32, priority: i32) std.c.E { - if (pid < 0) return .SRCH; - - const code: i32 = set_process_priority(pid, priority); - - if (code == -2) return .SRCH; - if (code == 0) return .SUCCESS; - - const errcode = bun.sys.getErrno(code); - return @enumFromInt(@intFromEnum(errcode)); -} - -pub fn getVersion(buf: []u8) []const u8 { - if (comptime Environment.isLinux) { - return linux.get_version(buf.ptr[0..bun.HOST_NAME_MAX]); - } else if (comptime Environment.isMac) { - return darwin.get_version(buf); - } else { - var info: bun.windows.libuv.uv_utsname_s = undefined; - const err = bun.windows.libuv.uv_os_uname(&info); - if (err != 0) { - return "unknown"; - } - const slice = bun.sliceTo(&info.version, 0); - @memcpy(buf[0..slice.len], slice); - return buf[0..slice.len]; - } -} - -pub fn getRelease(buf: []u8) []const u8 { - if (comptime Environment.isLinux) { - return linux.get_release(buf.ptr[0..bun.HOST_NAME_MAX]); - } else if (comptime Environment.isMac) { - return darwin.get_release(buf); - } else { - var info: bun.windows.libuv.uv_utsname_s = undefined; - const err = bun.windows.libuv.uv_os_uname(&info); - if (err != 0) { - return "unknown"; - } - const release = bun.sliceTo(&info.release, 0); - @memcpy(buf[0..release.len], release); - return buf[0..release.len]; - } -} - -pub extern fn cfmakeraw(*std.posix.termios) void; - -const LazyStatus = enum { - pending, - loaded, - failed, -}; - -pub fn _dlsym(handle: ?*anyopaque, name: [:0]const u8) ?*anyopaque { - if (comptime Environment.isWindows) { - return bun.windows.GetProcAddressA(handle, name); - } else if (comptime Environment.isMac or Environment.isLinux) { - return std.c.dlsym(handle, name.ptr); - } - - @compileError("dlsym unimplemented for this target"); -} - -pub fn dlsymWithHandle(comptime Type: type, comptime name: [:0]const u8, comptime handle_getter: fn () ?*anyopaque) ?Type { - if (comptime @typeInfo(Type) != .pointer) { - @compileError("dlsym must be a pointer type (e.g. ?const *fn()). 
Received " ++ @typeName(Type) ++ "."); - } - - const Wrapper = struct { - pub var function: Type = undefined; - var failed = false; - pub var once = std.once(loadOnce); - fn loadOnce() void { - function = bun.cast(Type, _dlsym(@call(bun.callmod_inline, handle_getter, .{}), name) orelse { - failed = true; - return; - }); - } - }; - Wrapper.once.call(); - if (Wrapper.failed) { - return null; - } - return Wrapper.function; -} - -pub fn dlsym(comptime Type: type, comptime name: [:0]const u8) ?Type { - const handle_getter = struct { - const RTLD_DEFAULT = if (bun.Environment.isMac) - @as(?*anyopaque, @ptrFromInt(@as(usize, @bitCast(@as(isize, -2))))) - else - @as(?*anyopaque, @ptrFromInt(@as(usize, 0))); - - pub fn getter() ?*anyopaque { - return RTLD_DEFAULT; - } - }.getter; - - return dlsymWithHandle(Type, name, handle_getter); -} - -/// Error condition is encoded as null -/// The only error in this function is ESRCH (no process found) -pub fn getProcessPriority(pid: i32) ?i32 { - return switch (get_process_priority(pid)) { - std.math.maxInt(i32) => null, - else => |prio| prio, - }; -} - -// set in c-bindings.cpp -extern fn get_process_priority(pid: i32) i32; -pub extern fn set_process_priority(pid: i32, priority: i32) i32; - -pub extern fn strncasecmp(s1: [*]const u8, s2: [*]const u8, n: usize) i32; -pub extern fn memmove(dest: [*]u8, src: [*]const u8, n: usize) void; - -pub fn move(dest: []u8, src: []const u8) void { - if (comptime Environment.allow_assert) { - if (src.len != dest.len) { - bun.Output.panic("Move: src.len != dest.len, {d} != {d}", .{ src.len, dest.len }); - } - } - memmove(dest.ptr, src.ptr, src.len); -} - -// https://man7.org/linux/man-pages/man3/fmod.3.html -pub extern fn fmod(f64, f64) f64; - -pub fn dlopen(filename: [:0]const u8, flags: C.RTLD) ?*anyopaque { - if (comptime Environment.isWindows) { - return bun.windows.LoadLibraryA(filename); - } - - return std.c.dlopen(filename, flags); -} - -pub extern "c" fn Bun__ttySetMode(fd: c_int, mode: c_int) c_int; - -pub extern "c" fn bun_initialize_process() void; -pub extern "c" fn bun_restore_stdio() void; -pub extern "c" fn open_as_nonblocking_tty(i32, i32) i32; - -pub extern fn strlen(ptr: [*c]const u8) usize; - -pub const passwd = translated.passwd; -pub const geteuid = translated.geteuid; -pub const getpwuid_r = translated.getpwuid_r; - -export fn Bun__errnoName(err: c_int) ?[*:0]const u8 { - return @tagName(bun.C.SystemErrno.init(err) orelse return null); -} diff --git a/src/cache.zig b/src/cache.zig index 0b2c88e973..9679b1ede0 100644 --- a/src/cache.zig +++ b/src/cache.zig @@ -9,7 +9,6 @@ const MutableString = bun.MutableString; const stringZ = bun.stringZ; const FeatureFlags = bun.FeatureFlags; const default_allocator = bun.default_allocator; -const C = bun.C; const js_ast = bun.JSAst; const logger = bun.logger; diff --git a/src/cli.zig b/src/cli.zig index 97f1a74935..5a801aee0b 100644 --- a/src/cli.zig +++ b/src/cli.zig @@ -8,7 +8,7 @@ const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const FeatureFlags = bun.FeatureFlags; -const C = bun.C; + const std = @import("std"); const lex = bun.js_lexer; const logger = bun.logger; diff --git a/src/cli/build_command.zig b/src/cli/build_command.zig index c80a6f4813..fc7fb2de60 100644 --- a/src/cli/build_command.zig +++ b/src/cli/build_command.zig @@ -9,7 +9,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; 
-const C = bun.C; const lex = bun.js_lexer; const logger = bun.logger; diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig index ae1314495c..333f00ed3e 100644 --- a/src/cli/bunx_command.zig +++ b/src/cli/bunx_command.zig @@ -9,7 +9,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const cli = @import("../cli.zig"); const Command = cli.Command; @@ -478,7 +478,7 @@ pub const BunxCommand = struct { // where a user can replace the directory with malicious code. // // If this format changes, please update cache clearing code in package_manager_command.zig - const uid = if (bun.Environment.isPosix) bun.C.getuid() else bun.windows.userUniqueId(); + const uid = if (bun.Environment.isPosix) bun.c.getuid() else bun.windows.userUniqueId(); PATH = switch (PATH.len > 0) { inline else => |path_is_nonzero| try std.fmt.allocPrint( ctx.allocator, diff --git a/src/cli/colon_list_type.zig b/src/cli/colon_list_type.zig index 3623185e8a..154acc694d 100644 --- a/src/cli/colon_list_type.zig +++ b/src/cli/colon_list_type.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); pub fn ColonListType(comptime t: type, comptime value_resolver: anytype) type { diff --git a/src/cli/create_command.zig b/src/cli/create_command.zig index 03f9abd815..823a1d7a59 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const Progress = bun.Progress; diff --git a/src/cli/discord_command.zig b/src/cli/discord_command.zig index 2bcdb456e4..9164f74495 100644 --- a/src/cli/discord_command.zig +++ b/src/cli/discord_command.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const open = @import("../open.zig"); diff --git a/src/cli/exec_command.zig b/src/cli/exec_command.zig index 1644fa312a..61dc04ea13 100644 --- a/src/cli/exec_command.zig +++ b/src/cli/exec_command.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const open = @import("../open.zig"); const Command = bun.CLI.Command; diff --git a/src/cli/init_command.zig b/src/cli/init_command.zig index 1b4e8cd285..3de145ae89 100644 --- a/src/cli/init_command.zig +++ b/src/cli/init_command.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const open = @import("../open.zig"); const CLI = @import("../cli.zig"); diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig index 0c0db9366b..23e43d55a7 100644 --- a/src/cli/install_completions_command.zig +++ b/src/cli/install_completions_command.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = 
bun.default_allocator; -const C = bun.C; + const std = @import("std"); const lex = bun.js_lexer; diff --git a/src/cli/pack_command.zig b/src/cli/pack_command.zig index 55113bcd8e..2a7d1587d1 100644 --- a/src/cli/pack_command.zig +++ b/src/cli/pack_command.zig @@ -2585,7 +2585,7 @@ pub const bindings = struct { }, else => { const pathname = archive_entry.pathname(); - const kind = bun.C.kindFromMode(archive_entry.filetype()); + const kind = bun.sys.kindFromMode(archive_entry.filetype()); const perm = archive_entry.perm(); var entry_info: EntryInfo = .{ diff --git a/src/cli/package_manager_command.zig b/src/cli/package_manager_command.zig index bef08d80e9..44101451b6 100644 --- a/src/cli/package_manager_command.zig +++ b/src/cli/package_manager_command.zig @@ -274,7 +274,7 @@ pub const PackageManagerCommand = struct { // This is to match 'bunx_command.BunxCommand.exec's logic const prefix = try std.fmt.allocPrint(ctx.allocator, "bunx-{d}-", .{ - if (bun.Environment.isPosix) bun.C.getuid() else bun.windows.userUniqueId(), + if (bun.Environment.isPosix) bun.c.getuid() else bun.windows.userUniqueId(), }); var deleted: usize = 0; diff --git a/src/cli/run_command.zig b/src/cli/run_command.zig index 7043f9b7da..c9daabb7f1 100644 --- a/src/cli/run_command.zig +++ b/src/cli/run_command.zig @@ -8,7 +8,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const uws = bun.uws; const JSC = bun.JSC; diff --git a/src/cli/shell_completions.zig b/src/cli/shell_completions.zig index 383961b616..223096be6d 100644 --- a/src/cli/shell_completions.zig +++ b/src/cli/shell_completions.zig @@ -8,7 +8,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; pub const Shell = enum { unknown, diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index 5858407a86..5a52b13ff3 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const OOM = bun.OOM; @@ -823,7 +823,7 @@ pub const CommandLineReporter = struct { if (comptime !reporters.lcov) break :brk .{ {}, {}, {}, {} }; // Ensure the directory exists - var fs = bun.JSC.Node.NodeFS{}; + var fs = bun.JSC.Node.fs.NodeFS{}; _ = fs.mkdirRecursive( .{ .path = bun.JSC.Node.PathLike{ @@ -944,7 +944,7 @@ pub const CommandLineReporter = struct { try lcov_buffered_writer.flush(); lcov_file.close(); const cwd = bun.FD.cwd(); - bun.C.moveFileZ( + bun.sys.moveFileZ( cwd, lcov_name, cwd, @@ -1045,7 +1045,7 @@ pub const TestCommand = struct { var snapshot_values = Snapshots.ValuesHashMap.init(ctx.allocator); var snapshot_counts = bun.StringHashMap(usize).init(ctx.allocator); var inline_snapshots_to_write = std.AutoArrayHashMap(TestRunner.File.ID, std.ArrayList(Snapshots.InlineSnapshotToWrite)).init(ctx.allocator); - JSC.isBunTest = true; + JSC.VirtualMachine.isBunTest = true; var reporter = try ctx.allocator.create(CommandLineReporter); reporter.* = CommandLineReporter{ @@ -1228,8 +1228,8 @@ pub const TestCommand = struct { vm.hot_reload = ctx.debug.hot_reload; switch (vm.hot_reload) { - .hot => JSC.HotReloader.enableHotModuleReloading(vm), - .watch => 
JSC.WatchReloader.enableHotModuleReloading(vm), + .hot => JSC.hot_reloader.HotReloader.enableHotModuleReloading(vm), + .watch => JSC.hot_reloader.WatchReloader.enableHotModuleReloading(vm), else => {}, } diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index 79c404c974..d4bdc2ad08 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -7,7 +7,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; const std = @import("std"); const Progress = bun.Progress; @@ -811,7 +810,7 @@ pub const UpgradeCommand = struct { current_executable_buf[target_dir_.len] = 0; } - C.moveFileZ(.fromStdDir(save_dir), exe, .fromStdDir(target_dir), target_filename) catch |err| { + bun.sys.moveFileZ(.fromStdDir(save_dir), exe, .fromStdDir(target_dir), target_filename) catch |err| { defer save_dir_.deleteTree(version_name) catch {}; if (comptime Environment.isWindows) { diff --git a/src/codegen/bindgen.ts b/src/codegen/bindgen.ts index 72bdd03a71..dbd0a9af69 100644 --- a/src/codegen/bindgen.ts +++ b/src/codegen/bindgen.ts @@ -1127,7 +1127,7 @@ const headers = new Set(); zig.line('const bun = @import("bun");'); zig.line("const JSC = bun.JSC;"); -zig.line("const JSHostFunctionType = JSC.JSHostFunctionType;\n"); +zig.line("const JSHostFunctionType = JSC.JSHostFn;\n"); zigInternal.line("const binding_internals = struct {"); zigInternal.indent(); @@ -1446,7 +1446,7 @@ for (const [filename, { functions, typedefs }] of files) { const minArgCount = fn.variants.reduce((acc, vari) => Math.min(acc, vari.args.length), Number.MAX_SAFE_INTEGER); zig.line(`pub fn ${wrapperName}(global: *JSC.JSGlobalObject) callconv(JSC.conv) JSC.JSValue {`); zig.line( - ` return JSC.NewRuntimeFunction(global, JSC.ZigString.static(${str(fn.name)}), ${minArgCount}, js${cap(fn.name)}, false, false, null);`, + ` return JSC.host_fn.NewRuntimeFunction(global, JSC.ZigString.static(${str(fn.name)}), ${minArgCount}, js${cap(fn.name)}, false, false, null);`, ); zig.line(`}`); } diff --git a/src/codegen/generate-classes.ts b/src/codegen/generate-classes.ts index ee7e7570c0..d18805a9aa 100644 --- a/src/codegen/generate-classes.ts +++ b/src/codegen/generate-classes.ts @@ -1919,7 +1919,7 @@ const JavaScriptCoreBindings = struct { output += ` pub fn ${classSymbolName(typeName, "call")}(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { if (comptime Environment.enable_logs) log_zig_call("${typeName}", callFrame); - return @call(.always_inline, JSC.toJSHostFunction(${typeName}.call), .{globalObject, callFrame}); + return @call(.always_inline, JSC.toJSHostFn(${typeName}.call), .{globalObject, callFrame}); } `; } @@ -2019,7 +2019,7 @@ const JavaScriptCoreBindings = struct { output += ` pub fn ${names.fn}(globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue { if (comptime Environment.enable_logs) log_zig_class_method("${typeName}", "${name}", callFrame); - return @call(.always_inline, JSC.toJSHostFunction(${typeName}.${fn}), .{globalObject, callFrame}); + return @call(.always_inline, JSC.toJSHostFn(${typeName}.${fn}), .{globalObject, callFrame}); } `; } @@ -2353,7 +2353,7 @@ const wrapGetterWithValueCallback = bun.gen_classes_lib.wrapGetterWithValueCallb pub const StaticGetterType = fn(*JSC.JSGlobalObject, JSC.JSValue, JSC.JSValue) callconv(JSC.conv) JSC.JSValue; pub const StaticSetterType = fn(*JSC.JSGlobalObject, JSC.JSValue, 
JSC.JSValue, JSC.JSValue) callconv(JSC.conv) bool; -pub const StaticCallbackType = JSC.JSHostFunctionType; +pub const StaticCallbackType = JSC.JSHostFn; pub const WriteBytesFn = *const fn(*anyopaque, ptr: [*]const u8, len: u32) callconv(JSC.conv) void; `; diff --git a/src/codegen/generate-js2native.ts b/src/codegen/generate-js2native.ts index c0fe8b0658..5116e1f390 100644 --- a/src/codegen/generate-js2native.ts +++ b/src/codegen/generate-js2native.ts @@ -227,7 +227,7 @@ export function getJS2NativeZig(gs2NativeZigPath: string) { filename: x.filename, })}(global: *JSC.JSGlobalObject, call_frame: *JSC.CallFrame) callconv(JSC.conv) JSC.JSValue {`, ` const function = @import(${JSON.stringify(path.relative(path.dirname(gs2NativeZigPath), x.filename))});`, - ` return @call(.always_inline, JSC.toJSHostFunction(function.${x.symbol_target}), .{global, call_frame});`, + ` return @call(.always_inline, JSC.toJSHostFn(function.${x.symbol_target}), .{global, call_frame});`, "}", ]), "comptime {", diff --git a/src/codegen/generate-node-errors.ts b/src/codegen/generate-node-errors.ts index 833827273c..766b7c9e89 100644 --- a/src/codegen/generate-node-errors.ts +++ b/src/codegen/generate-node-errors.ts @@ -92,7 +92,7 @@ for (let [code, constructor, name, ...other_constructors] of NodeErrors) { if (name == null) name = constructor.name; // it's useful to avoid the prefix, but module not found has a prefixed and unprefixed version - const codeWithoutPrefix = code === "ERR_MODULE_NOT_FOUND" ? code : code.replace(/^ERR_/, ""); + const codeWithoutPrefix = code === 'ERR_MODULE_NOT_FOUND' ? code : code.replace(/^ERR_/, ''); enumHeader += ` ${code} = ${i},\n`; listHeader += ` { JSC::ErrorType::${constructor.name}, "${name}"_s, "${code}"_s },\n`; diff --git a/src/compile_target.zig b/src/compile_target.zig index 98216036e3..58540642fe 100644 --- a/src/compile_target.zig +++ b/src/compile_target.zig @@ -297,7 +297,7 @@ pub fn downloadToPath(this: *const CompileTarget, env: *bun.DotEnv.Loader, alloc var did_retry = false; while (true) { - bun.C.moveFileZ(.fromStdDir(tmpdir), if (this.os == .windows) "bun.exe" else "bun", bun.invalid_fd, dest_z) catch |err| { + bun.sys.moveFileZ(.fromStdDir(tmpdir), if (this.os == .windows) "bun.exe" else "bun", bun.invalid_fd, dest_z) catch |err| { if (!did_retry) { did_retry = true; const dirname = bun.path.dirname(dest_z, .loose); diff --git a/src/copy_file.zig b/src/copy_file.zig index e3c37bc862..e8875a8f90 100644 --- a/src/copy_file.zig +++ b/src/copy_file.zig @@ -76,10 +76,10 @@ pub fn copyFileWithState(in: InputType, out: InputType, copy_file_state: *CopyFi if (can_use_ioctl_ficlone() and !copy_file_state.has_seen_exdev and !copy_file_state.has_ioctl_ficlone_failed) { // We only check once if the ioctl is supported, and cache the result. // EXT4 does not support FICLONE. - const rc = bun.C.linux.ioctl_ficlone(out, in); + const rc = bun.linux.ioctl_ficlone(out, in); // the ordering is flipped but it is consistent with other system calls. 
bun.sys.syslog("ioctl_ficlone({}, {}) = {d}", .{ in, out, rc }); - switch (bun.C.getErrno(rc)) { + switch (bun.sys.getErrno(rc)) { .SUCCESS => return CopyFileReturnType.success, .XDEV => { copy_file_state.has_seen_exdev = true; @@ -215,7 +215,7 @@ pub fn copyFileRange(in: fd_t, out: fd_t, len: usize, flags: u32, copy_file_stat while (true) { const rc = std.os.linux.copy_file_range(in, null, out, null, len, flags); bun.sys.syslog("copy_file_range({d}, {d}, {d}) = {d}", .{ in, out, len, rc }); - switch (bun.C.getErrno(rc)) { + switch (bun.sys.getErrno(rc)) { .SUCCESS => return .{ .result = @intCast(rc) }, // these may not be regular files, try fallback .INVAL => { @@ -246,7 +246,7 @@ pub fn copyFileRange(in: fd_t, out: fd_t, len: usize, flags: u32, copy_file_stat while (!copy_file_state.has_sendfile_failed) { const rc = std.os.linux.sendfile(@intCast(out), @intCast(in), null, len); bun.sys.syslog("sendfile({d}, {d}, {d}) = {d}", .{ in, out, len, rc }); - switch (bun.C.getErrno(rc)) { + switch (bun.sys.getErrno(rc)) { .SUCCESS => return .{ .result = @intCast(rc) }, .INTR => continue, // these may not be regular files, try fallback diff --git a/src/crash_handler.zig b/src/crash_handler.zig index bc66e14147..22405b017f 100644 --- a/src/crash_handler.zig +++ b/src/crash_handler.zig @@ -1488,7 +1488,7 @@ fn crash() noreturn { .windows => { // This exit code is what Node.js uses when it calls // abort. This is relied on by their Node-API tests. - bun.C.quick_exit(134); + bun.c.quick_exit(134); }, else => { // Install default handler so that the tkill below will terminate. diff --git a/src/create/SourceFileProjectGenerator.zig b/src/create/SourceFileProjectGenerator.zig index dc532afcc9..3ed1d12409 100644 --- a/src/create/SourceFileProjectGenerator.zig +++ b/src/create/SourceFileProjectGenerator.zig @@ -625,7 +625,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const Progress = bun.Progress; diff --git a/src/csrf.zig b/src/csrf.zig index c150e35255..e787c201ee 100644 --- a/src/csrf.zig +++ b/src/csrf.zig @@ -299,7 +299,7 @@ pub fn csrf__generate_impl(globalObject: *JSC.JSGlobalObject, callframe: *JSC.Ca return encoding.toNodeEncoding().encodeWithMaxSize(globalObject, boring.EVP_MAX_MD_SIZE + 32, token_bytes); } -pub const csrf__generate: JSC.JSHostFunctionType = JSC.toJSHostFunction(csrf__generate_impl); +pub const csrf__generate = JSC.toJSHostFn(csrf__generate_impl); /// JS binding function for verifying CSRF tokens /// First argument is token (required), second is options (optional) @@ -385,4 +385,4 @@ pub fn csrf__verify_impl(globalObject: *JSC.JSGlobalObject, call_frame: *JSC.Cal return JSC.JSValue.jsBoolean(is_valid); } -pub const csrf__verify: JSC.JSHostFunctionType = JSC.toJSHostFunction(csrf__verify_impl); +pub const csrf__verify = JSC.toJSHostFn(csrf__verify_impl); diff --git a/src/css/css_internals.zig b/src/css/css_internals.zig index e9b69ffdc2..cd19de22d2 100644 --- a/src/css/css_internals.zig +++ b/src/css/css_internals.zig @@ -56,7 +56,7 @@ pub fn testingImpl(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame, c const alloc = arena.allocator(); const arguments_ = callframe.arguments_old(3); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const source_arg: JSC.JSValue = arguments.nextEat() orelse 
{ return globalThis.throw("minifyTestWithOptions: expected 2 arguments, got 0", .{}); }; @@ -267,7 +267,7 @@ pub fn attrTest(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun. const alloc = arena.allocator(); const arguments_ = callframe.arguments_old(4); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const source_arg: JSC.JSValue = arguments.nextEat() orelse { return globalThis.throw("attrTest: expected 3 arguments, got 0", .{}); }; diff --git a/src/css_scanner.zig b/src/css_scanner.zig index 8ef7c05352..3ff68e7435 100644 --- a/src/css_scanner.zig +++ b/src/css_scanner.zig @@ -12,7 +12,7 @@ const StoredFileDescriptorType = bun.StoredFileDescriptorType; const FeatureFlags = bun.FeatureFlags; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const options = @import("./options.zig"); const import_record = @import("import_record.zig"); const logger = bun.logger; diff --git a/src/darwin.zig b/src/darwin.zig new file mode 100644 index 0000000000..ba214732ed --- /dev/null +++ b/src/darwin.zig @@ -0,0 +1,103 @@ +//! Platform specific APIs for Darwin/macOS +//! +//! If an API can be implemented on multiple platforms, +//! it does not belong in this namespace. + +/// Non-cancellable versions of various libc functions are undocumented +/// TODO: explain the $NOCANCEL problem +pub const nocancel = struct { + const c = std.c; + pub extern "c" fn @"recvfrom$NOCANCEL"(sockfd: c.fd_t, noalias buf: *anyopaque, len: usize, flags: u32, noalias src_addr: ?*c.sockaddr, noalias addrlen: ?*c.socklen_t) isize; + pub extern "c" fn @"sendto$NOCANCEL"(sockfd: c.fd_t, buf: *const anyopaque, len: usize, flags: u32, dest_addr: ?*const c.sockaddr, addrlen: c.socklen_t) isize; + pub extern "c" fn @"fcntl$NOCANCEL"(fd: c.fd_t, cmd: c_int, ...) c_int; + // pub extern "c" fn @"sendmsg$NOCANCEL"(sockfd: c.fd_t, msg: *const std.x.os.Socket.Message, flags: c_int) isize; + // pub extern "c" fn @"recvmsg$NOCANCEL"(sockfd: c.fd_t, msg: *std.x.os.Socket.Message, flags: c_int) isize; + pub extern "c" fn @"connect$NOCANCEL"(sockfd: c.fd_t, sock_addr: *const c.sockaddr, addrlen: c.socklen_t) c_int; + pub extern "c" fn @"accept$NOCANCEL"(sockfd: c.fd_t, noalias addr: ?*c.sockaddr, noalias addrlen: ?*c.socklen_t) c_int; + pub extern "c" fn @"accept4$NOCANCEL"(sockfd: c.fd_t, noalias addr: ?*c.sockaddr, noalias addrlen: ?*c.socklen_t, flags: c_uint) c_int; + pub extern "c" fn @"open$NOCANCEL"(path: [*:0]const u8, oflag: c_uint, ...) c_int; + // https://opensource.apple.com/source/xnu/xnu-7195.81.3/libsyscall/wrappers/open-base.c + pub extern "c" fn @"openat$NOCANCEL"(fd: c.fd_t, path: [*:0]const u8, oflag: c_uint, ...) 
c_int; + pub extern "c" fn @"read$NOCANCEL"(fd: c.fd_t, buf: [*]u8, nbyte: usize) isize; + pub extern "c" fn @"pread$NOCANCEL"(fd: c.fd_t, buf: [*]u8, nbyte: usize, offset: c.off_t) isize; + pub extern "c" fn @"preadv$NOCANCEL"(fd: c.fd_t, uf: [*]std.posix.iovec, count: i32, offset: c.off_t) isize; + pub extern "c" fn @"readv$NOCANCEL"(fd: c.fd_t, uf: [*]std.posix.iovec, count: i32) isize; + pub extern "c" fn @"write$NOCANCEL"(fd: c.fd_t, buf: [*]const u8, nbyte: usize) isize; + pub extern "c" fn @"writev$NOCANCEL"(fd: c.fd_t, buf: [*]const std.posix.iovec_const, count: i32) isize; + pub extern "c" fn @"pwritev$NOCANCEL"(fd: c.fd_t, buf: [*]const std.posix.iovec_const, count: i32, offset: c.off_t) isize; +}; + +pub const OSLog = opaque { + pub const Category = enum(u8) { + PointsOfInterest = 0, + Dynamicity = 1, + SizeAndThroughput = 2, + TimeProfile = 3, + SystemReporting = 4, + UserCustom = 5, + }; + + // Common subsystems that Instruments recognizes + pub const Subsystem = struct { + pub const Network = "com.apple.network"; + pub const FileIO = "com.apple.disk_io"; + pub const Graphics = "com.apple.graphics"; + pub const Memory = "com.apple.memory"; + pub const Performance = "com.apple.performance"; + }; + + extern "C" fn os_log_create(subsystem: ?[*:0]const u8, category: ?[*:0]const u8) ?*OSLog; + + pub fn init() ?*OSLog { + return os_log_create("com.bun.bun", "PointsOfInterest"); + } + + // anything except 0 and ~0 is a valid signpost id + var signpost_id_counter = std.atomic.Value(u64).init(1); + + pub fn signpost(log: *OSLog, name: i32) Signpost { + return .{ + .id = signpost_id_counter.fetchAdd(1, .monotonic), + .name = name, + .log = log, + }; + } + + const SignpostType = enum(u8) { + Event = 0, + IntervalBegin = 1, + IntervalEnd = 2, + }; + + pub extern "C" fn Bun__signpost_emit(log: *OSLog, id: u64, signpost_type: SignpostType, name: i32, category: u8) void; + + pub const Signpost = struct { + id: u64, + name: i32, + log: *OSLog, + + pub fn emit(this: *const Signpost, category: Category) void { + Bun__signpost_emit(this.log, this.id, .Event, this.name, @intFromEnum(category)); + } + + pub const Interval = struct { + signpost: Signpost, + category: Category, + + pub fn end(this: *const Interval) void { + Bun__signpost_emit(this.signpost.log, this.signpost.id, .IntervalEnd, this.signpost.name, @intFromEnum(this.category)); + } + }; + + pub fn interval(this: Signpost, category: Category) Interval { + Bun__signpost_emit(this.log, this.id, .IntervalBegin, this.name, @intFromEnum(category)); + return Interval{ + .signpost = this, + .category = category, + }; + } + }; +}; + +const std = @import("std"); +const bun = @import("bun"); diff --git a/src/darwin_c.zig b/src/darwin_c.zig deleted file mode 100644 index 0db5b30398..0000000000 --- a/src/darwin_c.zig +++ /dev/null @@ -1,769 +0,0 @@ -const std = @import("std"); -const bun = @import("bun"); -const builtin = @import("builtin"); -const posix = std.posix; -const mem = std.mem; -const Stat = std.fs.File.Stat; -const Kind = std.fs.File.Kind; -const StatError = std.fs.File.StatError; -const off_t = std.c.off_t; -const errno = posix.errno; -const zeroes = mem.zeroes; -pub extern "c" fn copyfile(from: [*:0]const u8, to: [*:0]const u8, state: ?std.c.copyfile_state_t, flags: u32) c_int; -pub const COPYFILE_STATE_SRC_FD = @as(c_int, 1); -pub const COPYFILE_STATE_SRC_FILENAME = @as(c_int, 2); -pub const COPYFILE_STATE_DST_FD = @as(c_int, 3); -pub const COPYFILE_STATE_DST_FILENAME = @as(c_int, 4); -pub const COPYFILE_STATE_QUARANTINE = @as(c_int, 
5); -pub const COPYFILE_STATE_STATUS_CB = @as(c_int, 6); -pub const COPYFILE_STATE_STATUS_CTX = @as(c_int, 7); -pub const COPYFILE_STATE_COPIED = @as(c_int, 8); -pub const COPYFILE_STATE_XATTRNAME = @as(c_int, 9); -pub const COPYFILE_STATE_WAS_CLONED = @as(c_int, 10); -pub const COPYFILE_DISABLE_VAR = "COPYFILE_DISABLE"; -pub const COPYFILE_ACL = @as(c_int, 1) << @as(c_int, 0); -pub const COPYFILE_STAT = @as(c_int, 1) << @as(c_int, 1); -pub const COPYFILE_XATTR = @as(c_int, 1) << @as(c_int, 2); -pub const COPYFILE_DATA = @as(c_int, 1) << @as(c_int, 3); -pub const COPYFILE_SECURITY = COPYFILE_STAT | COPYFILE_ACL; -pub const COPYFILE_METADATA = COPYFILE_SECURITY | COPYFILE_XATTR; -pub const COPYFILE_ALL = COPYFILE_METADATA | COPYFILE_DATA; -/// Descend into hierarchies -pub const COPYFILE_RECURSIVE = @as(c_int, 1) << @as(c_int, 15); -/// return flags for xattr or acls if set -pub const COPYFILE_CHECK = @as(c_int, 1) << @as(c_int, 16); -/// fail if destination exists -pub const COPYFILE_EXCL = @as(c_int, 1) << @as(c_int, 17); -/// don't follow if source is a symlink -pub const COPYFILE_NOFOLLOW_SRC = @as(c_int, 1) << @as(c_int, 18); -/// don't follow if dst is a symlink -pub const COPYFILE_NOFOLLOW_DST = @as(c_int, 1) << @as(c_int, 19); -/// unlink src after copy -pub const COPYFILE_MOVE = @as(c_int, 1) << @as(c_int, 20); -/// unlink dst before copy -pub const COPYFILE_UNLINK = @as(c_int, 1) << @as(c_int, 21); -pub const COPYFILE_NOFOLLOW = COPYFILE_NOFOLLOW_SRC | COPYFILE_NOFOLLOW_DST; -pub const COPYFILE_PACK = @as(c_int, 1) << @as(c_int, 22); -pub const COPYFILE_UNPACK = @as(c_int, 1) << @as(c_int, 23); -pub const COPYFILE_CLONE = @as(c_int, 1) << @as(c_int, 24); -pub const COPYFILE_CLONE_FORCE = @as(c_int, 1) << @as(c_int, 25); -pub const COPYFILE_RUN_IN_PLACE = @as(c_int, 1) << @as(c_int, 26); -pub const COPYFILE_DATA_SPARSE = @as(c_int, 1) << @as(c_int, 27); -pub const COPYFILE_PRESERVE_DST_TRACKED = @as(c_int, 1) << @as(c_int, 28); -pub const COPYFILE_VERBOSE = @as(c_int, 1) << @as(c_int, 30); -pub const COPYFILE_RECURSE_ERROR = @as(c_int, 0); -pub const COPYFILE_RECURSE_FILE = @as(c_int, 1); -pub const COPYFILE_RECURSE_DIR = @as(c_int, 2); -pub const COPYFILE_RECURSE_DIR_CLEANUP = @as(c_int, 3); -pub const COPYFILE_COPY_DATA = @as(c_int, 4); -pub const COPYFILE_COPY_XATTR = @as(c_int, 5); -pub const COPYFILE_START = @as(c_int, 1); -pub const COPYFILE_FINISH = @as(c_int, 2); -pub const COPYFILE_ERR = @as(c_int, 3); -pub const COPYFILE_PROGRESS = @as(c_int, 4); -pub const COPYFILE_CONTINUE = @as(c_int, 0); -pub const COPYFILE_SKIP = @as(c_int, 1); -pub const COPYFILE_QUIT = @as(c_int, 2); - -pub extern "c" fn memmem(haystack: [*]const u8, haystacklen: usize, needle: [*]const u8, needlelen: usize) ?[*]const u8; - -// int clonefileat(int src_dirfd, const char * src, int dst_dirfd, const char * dst, int flags); -pub extern "c" fn clonefileat(c_int, [*:0]const u8, c_int, [*:0]const u8, uint32_t: c_int) c_int; -// int fclonefileat(int srcfd, int dst_dirfd, const char * dst, int flags); -pub extern "c" fn fclonefileat(c_int, c_int, [*:0]const u8, uint32_t: c_int) c_int; -// int clonefile(const char * src, const char * dst, int flags); -pub extern "c" fn clonefile(src: [*:0]const u8, dest: [*:0]const u8, flags: c_int) c_int; - -pub const lstat = blk: { - const T = *const fn (?[*:0]const u8, ?*bun.Stat) callconv(.C) c_int; - break :blk @extern(T, .{ .name = if (bun.Environment.isAarch64) "lstat" else "lstat64" }); -}; - -pub const fstat = blk: { - const T = *const fn (i32, ?*bun.Stat) 
callconv(.C) c_int; - break :blk @extern(T, .{ .name = if (bun.Environment.isAarch64) "fstat" else "fstat64" }); -}; -pub const stat = blk: { - const T = *const fn (?[*:0]const u8, ?*bun.Stat) callconv(.C) c_int; - break :blk @extern(T, .{ .name = if (bun.Environment.isAarch64) "stat" else "stat64" }); -}; -// benchmarking this did nothing on macOS -// i verified it wasn't returning -1 -pub fn preallocate_file(_: posix.fd_t, _: off_t, _: off_t) !void { - // pub const struct_fstore = extern struct { - // fst_flags: c_uint, - // fst_posmode: c_int, - // fst_offset: off_t, - // fst_length: off_t, - // fst_bytesalloc: off_t, - // }; - // pub const fstore_t = struct_fstore; - - // pub const F_ALLOCATECONTIG = @as(c_int, 0x00000002); - // pub const F_ALLOCATEALL = @as(c_int, 0x00000004); - // pub const F_PEOFPOSMODE = @as(c_int, 3); - // pub const F_VOLPOSMODE = @as(c_int, 4); - // var fstore = zeroes(fstore_t); - // fstore.fst_flags = F_ALLOCATECONTIG; - // fstore.fst_posmode = F_PEOFPOSMODE; - // fstore.fst_offset = 0; - // fstore.fst_length = len + offset; - - // // Based on https://api.kde.org/frameworks/kcoreaddons/html/posix__fallocate__mac_8h_source.html - // var rc = os.system.fcntl(fd, os.F.PREALLOCATE, &fstore); - - // switch (rc) { - // 0 => return, - // else => { - // fstore.fst_flags = F_ALLOCATEALL; - // rc = os.system.fcntl(fd, os.F.PREALLOCATE, &fstore); - // }, - // } - - // std.mem.doNotOptimizeAway(&fstore); -} - -pub const SystemErrno = enum(u8) { - SUCCESS = 0, - EPERM = 1, - ENOENT = 2, - ESRCH = 3, - EINTR = 4, - EIO = 5, - ENXIO = 6, - E2BIG = 7, - ENOEXEC = 8, - EBADF = 9, - ECHILD = 10, - EDEADLK = 11, - ENOMEM = 12, - EACCES = 13, - EFAULT = 14, - ENOTBLK = 15, - EBUSY = 16, - EEXIST = 17, - EXDEV = 18, - ENODEV = 19, - ENOTDIR = 20, - EISDIR = 21, - EINVAL = 22, - ENFILE = 23, - EMFILE = 24, - ENOTTY = 25, - ETXTBSY = 26, - EFBIG = 27, - ENOSPC = 28, - ESPIPE = 29, - EROFS = 30, - EMLINK = 31, - EPIPE = 32, - EDOM = 33, - ERANGE = 34, - EAGAIN = 35, - EINPROGRESS = 36, - EALREADY = 37, - ENOTSOCK = 38, - EDESTADDRREQ = 39, - EMSGSIZE = 40, - EPROTOTYPE = 41, - ENOPROTOOPT = 42, - EPROTONOSUPPORT = 43, - ESOCKTNOSUPPORT = 44, - ENOTSUP = 45, - EPFNOSUPPORT = 46, - EAFNOSUPPORT = 47, - EADDRINUSE = 48, - EADDRNOTAVAIL = 49, - ENETDOWN = 50, - ENETUNREACH = 51, - ENETRESET = 52, - ECONNABORTED = 53, - ECONNRESET = 54, - ENOBUFS = 55, - EISCONN = 56, - ENOTCONN = 57, - ESHUTDOWN = 58, - ETOOMANYREFS = 59, - ETIMEDOUT = 60, - ECONNREFUSED = 61, - ELOOP = 62, - ENAMETOOLONG = 63, - EHOSTDOWN = 64, - EHOSTUNREACH = 65, - ENOTEMPTY = 66, - EPROCLIM = 67, - EUSERS = 68, - EDQUOT = 69, - ESTALE = 70, - EREMOTE = 71, - EBADRPC = 72, - ERPCMISMATCH = 73, - EPROGUNAVAIL = 74, - EPROGMISMATCH = 75, - EPROCUNAVAIL = 76, - ENOLCK = 77, - ENOSYS = 78, - EFTYPE = 79, - EAUTH = 80, - ENEEDAUTH = 81, - EPWROFF = 82, - EDEVERR = 83, - EOVERFLOW = 84, - EBADEXEC = 85, - EBADARCH = 86, - ESHLIBVERS = 87, - EBADMACHO = 88, - ECANCELED = 89, - EIDRM = 90, - ENOMSG = 91, - EILSEQ = 92, - ENOATTR = 93, - EBADMSG = 94, - EMULTIHOP = 95, - ENODATA = 96, - ENOLINK = 97, - ENOSR = 98, - ENOSTR = 99, - EPROTO = 100, - ETIME = 101, - EOPNOTSUPP = 102, - ENOPOLICY = 103, - ENOTRECOVERABLE = 104, - EOWNERDEAD = 105, - EQFULL = 106, - - pub const max = 107; - - pub fn init(code: anytype) ?SystemErrno { - if (code < 0) { - if (code <= -max) { - return null; - } - return @enumFromInt(-code); - } - if (code >= max) return null; - return @enumFromInt(code); - } -}; - -pub const UV_E2BIG: i32 = 
@intFromEnum(SystemErrno.E2BIG); -pub const UV_EACCES: i32 = @intFromEnum(SystemErrno.EACCES); -pub const UV_EADDRINUSE: i32 = @intFromEnum(SystemErrno.EADDRINUSE); -pub const UV_EADDRNOTAVAIL: i32 = @intFromEnum(SystemErrno.EADDRNOTAVAIL); -pub const UV_EAFNOSUPPORT: i32 = @intFromEnum(SystemErrno.EAFNOSUPPORT); -pub const UV_EAGAIN: i32 = @intFromEnum(SystemErrno.EAGAIN); -pub const UV_EALREADY: i32 = @intFromEnum(SystemErrno.EALREADY); -pub const UV_EBADF: i32 = @intFromEnum(SystemErrno.EBADF); -pub const UV_EBUSY: i32 = @intFromEnum(SystemErrno.EBUSY); -pub const UV_ECANCELED: i32 = @intFromEnum(SystemErrno.ECANCELED); -pub const UV_ECHARSET: i32 = -bun.windows.libuv.UV__ECHARSET; -pub const UV_ECONNABORTED: i32 = @intFromEnum(SystemErrno.ECONNABORTED); -pub const UV_ECONNREFUSED: i32 = @intFromEnum(SystemErrno.ECONNREFUSED); -pub const UV_ECONNRESET: i32 = @intFromEnum(SystemErrno.ECONNRESET); -pub const UV_EDESTADDRREQ: i32 = @intFromEnum(SystemErrno.EDESTADDRREQ); -pub const UV_EEXIST: i32 = @intFromEnum(SystemErrno.EEXIST); -pub const UV_EFAULT: i32 = @intFromEnum(SystemErrno.EFAULT); -pub const UV_EHOSTUNREACH: i32 = @intFromEnum(SystemErrno.EHOSTUNREACH); -pub const UV_EINTR: i32 = @intFromEnum(SystemErrno.EINTR); -pub const UV_EINVAL: i32 = @intFromEnum(SystemErrno.EINVAL); -pub const UV_EIO: i32 = @intFromEnum(SystemErrno.EIO); -pub const UV_EISCONN: i32 = @intFromEnum(SystemErrno.EISCONN); -pub const UV_EISDIR: i32 = @intFromEnum(SystemErrno.EISDIR); -pub const UV_ELOOP: i32 = @intFromEnum(SystemErrno.ELOOP); -pub const UV_EMFILE: i32 = @intFromEnum(SystemErrno.EMFILE); -pub const UV_EMSGSIZE: i32 = @intFromEnum(SystemErrno.EMSGSIZE); -pub const UV_ENAMETOOLONG: i32 = @intFromEnum(SystemErrno.ENAMETOOLONG); -pub const UV_ENETDOWN: i32 = @intFromEnum(SystemErrno.ENETDOWN); -pub const UV_ENETUNREACH: i32 = @intFromEnum(SystemErrno.ENETUNREACH); -pub const UV_ENFILE: i32 = @intFromEnum(SystemErrno.ENFILE); -pub const UV_ENOBUFS: i32 = @intFromEnum(SystemErrno.ENOBUFS); -pub const UV_ENODEV: i32 = @intFromEnum(SystemErrno.ENODEV); -pub const UV_ENOENT: i32 = @intFromEnum(SystemErrno.ENOENT); -pub const UV_ENOMEM: i32 = @intFromEnum(SystemErrno.ENOMEM); -pub const UV_ENONET: i32 = -bun.windows.libuv.UV_ENONET; -pub const UV_ENOSPC: i32 = @intFromEnum(SystemErrno.ENOSPC); -pub const UV_ENOSYS: i32 = @intFromEnum(SystemErrno.ENOSYS); -pub const UV_ENOTCONN: i32 = @intFromEnum(SystemErrno.ENOTCONN); -pub const UV_ENOTDIR: i32 = @intFromEnum(SystemErrno.ENOTDIR); -pub const UV_ENOTEMPTY: i32 = @intFromEnum(SystemErrno.ENOTEMPTY); -pub const UV_ENOTSOCK: i32 = @intFromEnum(SystemErrno.ENOTSOCK); -pub const UV_ENOTSUP: i32 = @intFromEnum(SystemErrno.ENOTSUP); -pub const UV_EPERM: i32 = @intFromEnum(SystemErrno.EPERM); -pub const UV_EPIPE: i32 = @intFromEnum(SystemErrno.EPIPE); -pub const UV_EPROTO: i32 = @intFromEnum(SystemErrno.EPROTO); -pub const UV_EPROTONOSUPPORT: i32 = @intFromEnum(SystemErrno.EPROTONOSUPPORT); -pub const UV_EPROTOTYPE: i32 = @intFromEnum(SystemErrno.EPROTOTYPE); -pub const UV_EROFS: i32 = @intFromEnum(SystemErrno.EROFS); -pub const UV_ESHUTDOWN: i32 = @intFromEnum(SystemErrno.ESHUTDOWN); -pub const UV_ESPIPE: i32 = @intFromEnum(SystemErrno.ESPIPE); -pub const UV_ESRCH: i32 = @intFromEnum(SystemErrno.ESRCH); -pub const UV_ETIMEDOUT: i32 = @intFromEnum(SystemErrno.ETIMEDOUT); -pub const UV_ETXTBSY: i32 = @intFromEnum(SystemErrno.ETXTBSY); -pub const UV_EXDEV: i32 = @intFromEnum(SystemErrno.EXDEV); -pub const UV_EFBIG: i32 = @intFromEnum(SystemErrno.EFBIG); -pub const 
UV_ENOPROTOOPT: i32 = @intFromEnum(SystemErrno.ENOPROTOOPT); -pub const UV_ERANGE: i32 = @intFromEnum(SystemErrno.ERANGE); -pub const UV_ENXIO: i32 = @intFromEnum(SystemErrno.ENXIO); -pub const UV_EMLINK: i32 = @intFromEnum(SystemErrno.EMLINK); -pub const UV_EHOSTDOWN: i32 = @intFromEnum(SystemErrno.EHOSTDOWN); -pub const UV_EREMOTEIO: i32 = -bun.windows.libuv.UV_EREMOTEIO; -pub const UV_ENOTTY: i32 = @intFromEnum(SystemErrno.ENOTTY); -pub const UV_EFTYPE: i32 = @intFromEnum(SystemErrno.EFTYPE); -pub const UV_EILSEQ: i32 = @intFromEnum(SystemErrno.EILSEQ); -pub const UV_EOVERFLOW: i32 = @intFromEnum(SystemErrno.EOVERFLOW); -pub const UV_ESOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT); -pub const UV_ENODATA: i32 = @intFromEnum(SystemErrno.ENODATA); -pub const UV_EUNATCH: i32 = -bun.windows.libuv.UV_EUNATCH; - -// Courtesy of https://github.com/nodejs/node/blob/master/deps/uv/src/unix/darwin-stub.h -pub const struct_CFArrayCallBacks = opaque {}; -pub const CFIndex = c_long; -pub const struct_CFRunLoopSourceContext = extern struct { - version: CFIndex, - info: ?*anyopaque, - pad: [7]?*anyopaque, - perform: ?*const fn (?*anyopaque) callconv(.C) void, -}; -pub const struct_FSEventStreamContext = extern struct { - version: CFIndex, - info: ?*anyopaque, - pad: [3]?*anyopaque, -}; -pub const struct_CFRange = extern struct { - location: CFIndex, - length: CFIndex, -}; -pub const CFAbsoluteTime = f64; -pub const CFTimeInterval = f64; -pub const FSEventStreamEventFlags = c_int; -pub const OSStatus = c_int; -pub const CFArrayCallBacks = struct_CFArrayCallBacks; -pub const CFRunLoopSourceContext = struct_CFRunLoopSourceContext; -pub const FSEventStreamContext = struct_FSEventStreamContext; -pub const FSEventStreamCreateFlags = u32; -pub const FSEventStreamEventId = u64; -pub const CFStringEncoding = c_uint; -pub const CFAllocatorRef = ?*anyopaque; -pub const CFArrayRef = ?*anyopaque; -pub const CFBundleRef = ?*anyopaque; -pub const CFDataRef = ?*anyopaque; -pub const CFDictionaryRef = ?*anyopaque; -pub const CFMutableDictionaryRef = ?*anyopaque; -pub const CFRange = struct_CFRange; -pub const CFRunLoopRef = ?*anyopaque; -pub const CFRunLoopSourceRef = ?*anyopaque; -pub const CFStringRef = ?*anyopaque; -pub const CFTypeRef = ?*anyopaque; -pub const FSEventStreamRef = ?*anyopaque; -pub const IOOptionBits = u32; -pub const io_iterator_t = c_uint; -pub const io_object_t = c_uint; -pub const io_service_t = c_uint; -pub const io_registry_entry_t = c_uint; -pub const FSEventStreamCallback = ?*const fn (FSEventStreamRef, ?*anyopaque, c_int, ?*anyopaque, [*c]const FSEventStreamEventFlags, [*c]const FSEventStreamEventId) callconv(.C) void; -pub const kCFStringEncodingUTF8: CFStringEncoding = @as(CFStringEncoding, @bitCast(@as(c_int, 134217984))); -pub const noErr: OSStatus = 0; -pub const kFSEventStreamEventIdSinceNow: FSEventStreamEventId = @as(FSEventStreamEventId, @bitCast(@as(c_longlong, -@as(c_int, 1)))); -pub const kFSEventStreamCreateFlagNoDefer: c_int = 2; -pub const kFSEventStreamCreateFlagFileEvents: c_int = 16; -pub const kFSEventStreamEventFlagEventIdsWrapped: c_int = 8; -pub const kFSEventStreamEventFlagHistoryDone: c_int = 16; -pub const kFSEventStreamEventFlagItemChangeOwner: c_int = 16384; -pub const kFSEventStreamEventFlagItemCreated: c_int = 256; -pub const kFSEventStreamEventFlagItemFinderInfoMod: c_int = 8192; -pub const kFSEventStreamEventFlagItemInodeMetaMod: c_int = 1024; -pub const kFSEventStreamEventFlagItemIsDir: c_int = 131072; -pub const 
kFSEventStreamEventFlagItemModified: c_int = 4096; -pub const kFSEventStreamEventFlagItemRemoved: c_int = 512; -pub const kFSEventStreamEventFlagItemRenamed: c_int = 2048; -pub const kFSEventStreamEventFlagItemXattrMod: c_int = 32768; -pub const kFSEventStreamEventFlagKernelDropped: c_int = 4; -pub const kFSEventStreamEventFlagMount: c_int = 64; -pub const kFSEventStreamEventFlagRootChanged: c_int = 32; -pub const kFSEventStreamEventFlagUnmount: c_int = 128; -pub const kFSEventStreamEventFlagUserDropped: c_int = 2; - -pub fn getFreeMemory() u64 { - return @extern(*const fn () callconv(.C) u64, .{ .name = "Bun__Os__getFreeMemory" })(); -} - -pub fn getTotalMemory() u64 { - var memory_: [32]c_ulonglong = undefined; - var size: usize = memory_.len; - - std.posix.sysctlbynameZ( - "hw.memsize", - &memory_, - &size, - null, - 0, - ) catch |err| switch (err) { - else => return 0, - }; - - return memory_[0]; -} - -pub fn getSystemUptime() u64 { - var boot_time: std.posix.timeval = undefined; - var size: usize = @sizeOf(@TypeOf(boot_time)); - - std.posix.sysctlbynameZ( - "kern.boottime", - &boot_time, - &size, - null, - 0, - ) catch |err| switch (err) { - else => return 0, - }; - - return @intCast(std.time.timestamp() - boot_time.sec); -} - -pub fn getSystemLoadavg() [3]f64 { - var loadavg: bun.c.struct_loadavg = undefined; - var size: usize = @sizeOf(@TypeOf(loadavg)); - - std.posix.sysctlbynameZ( - "vm.loadavg", - &loadavg, - &size, - null, - 0, - ) catch |err| switch (err) { - else => return [3]f64{ 0, 0, 0 }, - }; - - const scale: f64 = @floatFromInt(loadavg.fscale); - return .{ - if (scale == 0.0) 0 else @as(f64, @floatFromInt(loadavg.ldavg[0])) / scale, - if (scale == 0.0) 0 else @as(f64, @floatFromInt(loadavg.ldavg[1])) / scale, - if (scale == 0.0) 0 else @as(f64, @floatFromInt(loadavg.ldavg[2])) / scale, - }; -} - -pub const processor_flavor_t = c_int; - -// https://opensource.apple.com/source/xnu/xnu-792/osfmk/mach/processor_info.h.auto.html -pub const PROCESSOR_CPU_LOAD_INFO: processor_flavor_t = 2; -// https://opensource.apple.com/source/xnu/xnu-792/osfmk/mach/machine.h.auto.html -pub const CPU_STATE_MAX = 4; -pub const processor_cpu_load_info = extern struct { - cpu_ticks: [CPU_STATE_MAX]c_uint, -}; -pub const PROCESSOR_CPU_LOAD_INFO_COUNT = @as(std.c.mach_msg_type_number_t, @sizeOf(processor_cpu_load_info) / @sizeOf(std.c.natural_t)); -pub const processor_info_array_t = [*]c_int; -pub const PROCESSOR_INFO_MAX = 1024; - -pub extern fn host_processor_info(host: std.c.host_t, flavor: processor_flavor_t, out_processor_count: *std.c.natural_t, out_processor_info: *processor_info_array_t, out_processor_infoCnt: *std.c.mach_msg_type_number_t) std.c.E; - -pub extern fn getuid(...) std.posix.uid_t; -pub extern fn getgid(...) 
std.posix.gid_t; - -pub fn get_version(buf: []u8) []const u8 { - @memset(buf, 0); - - var size: usize = buf.len; - - if (std.c.sysctlbyname( - "kern.version", - buf.ptr, - &size, - null, - 0, - ) == -1) return "unknown"; - - return bun.sliceTo(buf, 0); -} - -pub fn get_release(buf: []u8) []const u8 { - @memset(buf, 0); - - var size: usize = buf.len; - - if (std.c.sysctlbyname( - "kern.osrelease", - buf.ptr, - &size, - null, - 0, - ) == -1) return "unknown"; - - return bun.sliceTo(buf, 0); -} - -pub const IOCPARM_MASK = @as(c_int, 0x1fff); -pub inline fn IOCPARM_LEN(x: anytype) @TypeOf((x >> @as(c_int, 16)) & IOCPARM_MASK) { - return (x >> @as(c_int, 16)) & IOCPARM_MASK; -} -pub inline fn IOCBASECMD(x: anytype) @TypeOf(x & ~(IOCPARM_MASK << @as(c_int, 16))) { - return x & ~(IOCPARM_MASK << @as(c_int, 16)); -} -pub inline fn IOCGROUP(x: anytype) @TypeOf((x >> @as(c_int, 8)) & @as(c_int, 0xff)) { - return (x >> @as(c_int, 8)) & @as(c_int, 0xff); -} -pub const IOCPARM_MAX = IOCPARM_MASK + @as(c_int, 1); -pub const IOC_VOID = @import("std").zig.c_translation.cast(u32, @import("std").zig.c_translation.promoteIntLiteral(c_int, 0x20000000, .hex)); -pub const IOC_OUT = @import("std").zig.c_translation.cast(u32, @import("std").zig.c_translation.promoteIntLiteral(c_int, 0x40000000, .hex)); -pub const IOC_IN = @import("std").zig.c_translation.cast(u32, @import("std").zig.c_translation.promoteIntLiteral(c_int, 0x80000000, .hex)); -pub const IOC_INOUT = IOC_IN | IOC_OUT; -pub const IOC_DIRMASK = @import("std").zig.c_translation.cast(u32, @import("std").zig.c_translation.promoteIntLiteral(c_int, 0xe0000000, .hex)); -pub inline fn _IOC(inout: anytype, group: anytype, num: anytype, len: anytype) @TypeOf(((inout | ((len & IOCPARM_MASK) << @as(c_int, 16))) | (group << @as(c_int, 8))) | num) { - return ((inout | ((len & IOCPARM_MASK) << @as(c_int, 16))) | (group << @as(c_int, 8))) | num; -} -pub inline fn _IO(g: anytype, n: anytype) @TypeOf(_IOC(IOC_VOID, g, n, @as(c_int, 0))) { - return _IOC(IOC_VOID, g, n, @as(c_int, 0)); -} -pub inline fn _IOR(g: anytype, n: anytype, t: anytype) @TypeOf(_IOC(IOC_OUT, g, n, @import("std").zig.c_translation.sizeof(t))) { - return _IOC(IOC_OUT, g, n, @import("std").zig.c_translation.sizeof(t)); -} -pub inline fn _IOW(g: anytype, n: anytype, t: anytype) @TypeOf(_IOC(IOC_IN, g, n, @import("std").zig.c_translation.sizeof(t))) { - return _IOC(IOC_IN, g, n, @import("std").zig.c_translation.sizeof(t)); -} -pub inline fn _IOWR(g: anytype, n: anytype, t: anytype) @TypeOf(_IOC(IOC_INOUT, g, n, @import("std").zig.c_translation.sizeof(t))) { - return _IOC(IOC_INOUT, g, n, @import("std").zig.c_translation.sizeof(t)); -} -pub const TIOCMODG = _IOR('t', @as(c_int, 3), c_int); -pub const TIOCMODS = _IOW('t', @as(c_int, 4), c_int); -pub const TIOCM_LE = @as(c_int, 0o001); -pub const TIOCM_DTR = @as(c_int, 0o002); -pub const TIOCM_RTS = @as(c_int, 0o004); -pub const TIOCM_ST = @as(c_int, 0o010); -pub const TIOCM_SR = @as(c_int, 0o020); -pub const TIOCM_CTS = @as(c_int, 0o040); -pub const TIOCM_CAR = @as(c_int, 0o100); -pub const TIOCM_CD = TIOCM_CAR; -pub const TIOCM_RNG = @as(c_int, 0o200); -pub const TIOCM_RI = TIOCM_RNG; -pub const TIOCM_DSR = @as(c_int, 0o400); -pub const TIOCEXCL = _IO('t', @as(c_int, 13)); -pub const TIOCNXCL = _IO('t', @as(c_int, 14)); -pub const TIOCFLUSH = _IOW('t', @as(c_int, 16), c_int); -pub const TIOCGETD = _IOR('t', @as(c_int, 26), c_int); -pub const TIOCSETD = _IOW('t', @as(c_int, 27), c_int); -pub const TIOCIXON = _IO('t', @as(c_int, 129)); -pub const 
TIOCIXOFF = _IO('t', @as(c_int, 128)); -pub const TIOCSBRK = _IO('t', @as(c_int, 123)); -pub const TIOCCBRK = _IO('t', @as(c_int, 122)); -pub const TIOCSDTR = _IO('t', @as(c_int, 121)); -pub const TIOCCDTR = _IO('t', @as(c_int, 120)); -pub const TIOCGPGRP = _IOR('t', @as(c_int, 119), c_int); -pub const TIOCSPGRP = _IOW('t', @as(c_int, 118), c_int); -pub const TIOCOUTQ = _IOR('t', @as(c_int, 115), c_int); -pub const TIOCSTI = _IOW('t', @as(c_int, 114), u8); -pub const TIOCNOTTY = _IO('t', @as(c_int, 113)); -pub const TIOCPKT = _IOW('t', @as(c_int, 112), c_int); -pub const TIOCPKT_DATA = @as(c_int, 0x00); -pub const TIOCPKT_FLUSHREAD = @as(c_int, 0x01); -pub const TIOCPKT_FLUSHWRITE = @as(c_int, 0x02); -pub const TIOCPKT_STOP = @as(c_int, 0x04); -pub const TIOCPKT_START = @as(c_int, 0x08); -pub const TIOCPKT_NOSTOP = @as(c_int, 0x10); -pub const TIOCPKT_DOSTOP = @as(c_int, 0x20); -pub const TIOCPKT_IOCTL = @as(c_int, 0x40); -pub const TIOCSTOP = _IO('t', @as(c_int, 111)); -pub const TIOCSTART = _IO('t', @as(c_int, 110)); -pub const TIOCMSET = _IOW('t', @as(c_int, 109), c_int); -pub const TIOCMBIS = _IOW('t', @as(c_int, 108), c_int); -pub const TIOCMBIC = _IOW('t', @as(c_int, 107), c_int); -pub const TIOCMGET = _IOR('t', @as(c_int, 106), c_int); -// pub const TIOCGWINSZ = _IOR('t', @as(c_int, 104), struct_winsize); -// pub const TIOCSWINSZ = _IOW('t', @as(c_int, 103), struct_winsize); -pub const TIOCUCNTL = _IOW('t', @as(c_int, 102), c_int); -pub const TIOCSTAT = _IO('t', @as(c_int, 101)); -pub inline fn UIOCCMD(n: anytype) @TypeOf(_IO('u', n)) { - return _IO('u', n); -} -pub const TIOCSCONS = _IO('t', @as(c_int, 99)); -pub const TIOCCONS = _IOW('t', @as(c_int, 98), c_int); -pub const TIOCSCTTY = _IO('t', @as(c_int, 97)); -pub const TIOCEXT = _IOW('t', @as(c_int, 96), c_int); -pub const TIOCSIG = _IO('t', @as(c_int, 95)); -pub const TIOCDRAIN = _IO('t', @as(c_int, 94)); -pub const TIOCMSDTRWAIT = _IOW('t', @as(c_int, 91), c_int); -pub const TIOCMGDTRWAIT = _IOR('t', @as(c_int, 90), c_int); -pub const TIOCSDRAINWAIT = _IOW('t', @as(c_int, 87), c_int); -pub const TIOCGDRAINWAIT = _IOR('t', @as(c_int, 86), c_int); -pub const TIOCDSIMICROCODE = _IO('t', @as(c_int, 85)); -pub const TIOCPTYGRANT = _IO('t', @as(c_int, 84)); -pub const TIOCPTYGNAME = _IOC(IOC_OUT, 't', @as(c_int, 83), @as(c_int, 128)); -pub const TIOCPTYUNLK = _IO('t', @as(c_int, 82)); -pub const TTYDISC = @as(c_int, 0); -pub const TABLDISC = @as(c_int, 3); -pub const SLIPDISC = @as(c_int, 4); -pub const PPPDISC = @as(c_int, 5); -// pub const TIOCGSIZE = TIOCGWINSZ; -// pub const TIOCSSIZE = TIOCSWINSZ; -pub const FIOCLEX = _IO('f', @as(c_int, 1)); -pub const FIONCLEX = _IO('f', @as(c_int, 2)); -pub const FIONREAD = _IOR('f', @as(c_int, 127), c_int); -pub const FIONBIO = _IOW('f', @as(c_int, 126), c_int); -pub const FIOASYNC = _IOW('f', @as(c_int, 125), c_int); -pub const FIOSETOWN = _IOW('f', @as(c_int, 124), c_int); -pub const FIOGETOWN = _IOR('f', @as(c_int, 123), c_int); -pub const FIODTYPE = _IOR('f', @as(c_int, 122), c_int); -pub const SIOCSHIWAT = _IOW('s', @as(c_int, 0), c_int); -pub const SIOCGHIWAT = _IOR('s', @as(c_int, 1), c_int); -pub const SIOCSLOWAT = _IOW('s', @as(c_int, 2), c_int); -pub const SIOCGLOWAT = _IOR('s', @as(c_int, 3), c_int); -pub const SIOCATMARK = _IOR('s', @as(c_int, 7), c_int); -pub const SIOCSPGRP = _IOW('s', @as(c_int, 8), c_int); -pub const SIOCGPGRP = _IOR('s', @as(c_int, 9), c_int); -// pub const SIOCSETVLAN = SIOCSIFVLAN; -// pub const SIOCGETVLAN = SIOCGIFVLAN; - -// As of Zig 
v0.11.0-dev.1393+38eebf3c4, ifaddrs.h is not included in the headers -pub const ifaddrs = extern struct { - ifa_next: ?*ifaddrs, - ifa_name: [*:0]u8, - ifa_flags: c_uint, - ifa_addr: ?*std.posix.sockaddr, - ifa_netmask: ?*std.posix.sockaddr, - ifa_dstaddr: ?*std.posix.sockaddr, - ifa_data: *anyopaque, -}; -pub extern fn getifaddrs(*?*ifaddrs) c_int; -pub extern fn freeifaddrs(?*ifaddrs) void; - -pub const IFF_RUNNING = bun.c.IFF_RUNNING; -pub const IFF_UP = bun.c.IFF_UP; -pub const IFF_LOOPBACK = bun.c.IFF_LOOPBACK; -pub const sockaddr_dl = extern struct { - sdl_len: u8, // Total length of sockaddr */ - sdl_family: u8, // AF_LINK */ - sdl_index: u16, // if != 0, system given index for interface */ - sdl_type: u8, // interface type */ - sdl_nlen: u8, // interface name length, no trailing 0 reqd. */ - sdl_alen: u8, // link level address length */ - sdl_slen: u8, // link layer selector length */ - sdl_data: [12]u8, // minimum work area, can be larger; contains both if name and ll address */ - //#ifndef __APPLE__ - // /* For TokenRing */ - // u_short sdl_rcf; /* source routing control */ - // u_short sdl_route[16]; /* source routing information */ - //#endif -}; - -pub const F = struct { - pub const DUPFD_CLOEXEC = bun.c.F_DUPFD_CLOEXEC; - pub const DUPFD = bun.c.F_DUPFD; -}; - -// it turns out preallocating on APFS on an M1 is slower. -// so this is a linux-only optimization for now. -pub const preallocate_length = std.math.maxInt(u51); - -pub const Mode = std.posix.mode_t; - -pub const E = std.posix.E; -pub const S = std.posix.S; - -pub fn getErrno(rc: anytype) E { - if (rc == -1) { - return @enumFromInt(std.c._errno().*); - } else { - return .SUCCESS; - } -} - -pub extern "c" fn umask(Mode) Mode; - -// #define RENAME_SECLUDE 0x00000001 -// #define RENAME_SWAP 0x00000002 -// #define RENAME_EXCL 0x00000004 -// #define RENAME_RESERVED1 0x00000008 -// #define RENAME_NOFOLLOW_ANY 0x00000010 -pub const RENAME_SECLUDE = 0x00000001; -pub const RENAME_SWAP = 0x00000002; -pub const RENAME_EXCL = 0x00000004; -pub const RENAME_RESERVED1 = 0x00000008; -pub const RENAME_NOFOLLOW_ANY = 0x00000010; - -// int renameatx_np(int fromfd, const char *from, int tofd, const char *to, unsigned int flags); -pub extern "c" fn renameatx_np(fromfd: c_int, from: ?[*:0]const u8, tofd: c_int, to: ?[*:0]const u8, flags: c_uint) c_int; - -pub const CLOCK_REALTIME = 0; -pub const CLOCK_MONOTONIC = 6; -pub const CLOCK_MONOTONIC_RAW = 4; -pub const CLOCK_MONOTONIC_RAW_APPROX = 5; -pub const CLOCK_UPTIME_RAW = 8; -pub const CLOCK_UPTIME_RAW_APPROX = 9; -pub const CLOCK_PROCESS_CPUTIME_ID = 12; -pub const CLOCK_THREAD_CPUTIME_ID = 1; - -pub extern fn memset_pattern4(buf: [*]u8, pattern: [*]const u8, len: usize) void; -pub extern fn memset_pattern8(buf: [*]u8, pattern: [*]const u8, len: usize) void; -pub extern fn memset_pattern16(buf: [*]u8, pattern: [*]const u8, len: usize) void; - -pub const OSLog = opaque { - pub const Category = enum(u8) { - PointsOfInterest = 0, - Dynamicity = 1, - SizeAndThroughput = 2, - TimeProfile = 3, - SystemReporting = 4, - UserCustom = 5, - }; - - // Common subsystems that Instruments recognizes - pub const Subsystem = struct { - pub const Network = "com.apple.network"; - pub const FileIO = "com.apple.disk_io"; - pub const Graphics = "com.apple.graphics"; - pub const Memory = "com.apple.memory"; - pub const Performance = "com.apple.performance"; - }; - - extern "C" fn os_log_create(subsystem: ?[*:0]const u8, category: ?[*:0]const u8) ?*OSLog; - - pub fn init() ?*OSLog { - return 
os_log_create("com.bun.bun", "PointsOfInterest"); - } - - // anything except 0 and ~0 is a valid signpost id - var signpost_id_counter = std.atomic.Value(u64).init(1); - - pub fn signpost(log: *OSLog, name: i32) Signpost { - return .{ - .id = signpost_id_counter.fetchAdd(1, .monotonic), - .name = name, - .log = log, - }; - } - - const SignpostType = enum(u8) { - Event = 0, - IntervalBegin = 1, - IntervalEnd = 2, - }; - - pub extern "C" fn Bun__signpost_emit(log: *OSLog, id: u64, signpost_type: SignpostType, name: i32, category: u8) void; - - pub const Signpost = struct { - id: u64, - name: i32, - log: *OSLog, - - pub fn emit(this: *const Signpost, category: Category) void { - Bun__signpost_emit(this.log, this.id, .Event, this.name, @intFromEnum(category)); - } - - pub const Interval = struct { - signpost: Signpost, - category: Category, - - pub fn end(this: *const Interval) void { - Bun__signpost_emit(this.signpost.log, this.signpost.id, .IntervalEnd, this.signpost.name, @intFromEnum(this.category)); - } - }; - - pub fn interval(this: Signpost, category: Category) Interval { - Bun__signpost_emit(this.log, this.id, .IntervalBegin, this.name, @intFromEnum(category)); - return Interval{ - .signpost = this, - .category = category, - }; - } - }; -}; diff --git a/src/defines-table.zig b/src/defines-table.zig index a67f6d3957..449af08581 100644 --- a/src/defines-table.zig +++ b/src/defines-table.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const defines = @import("./defines.zig"); diff --git a/src/defines.zig b/src/defines.zig index 6926b304d1..55330b5758 100644 --- a/src/defines.zig +++ b/src/defines.zig @@ -13,7 +13,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const Ref = @import("ast/base.zig").Ref; const GlobalDefinesKey = @import("./defines-table.zig").GlobalDefinesKey; diff --git a/src/deps/c_ares.zig b/src/deps/c_ares.zig index 71584fac52..867828f05a 100644 --- a/src/deps/c_ares.zig +++ b/src/deps/c_ares.zig @@ -1999,7 +1999,7 @@ pub const ares_addr_node = struct_ares_addr_node; pub const ares_addr_port_node = struct_ares_addr_port_node; comptime { - const Bun__canonicalizeIP = JSC.toJSHostFunction(Bun__canonicalizeIP_); + const Bun__canonicalizeIP = JSC.toJSHostFn(Bun__canonicalizeIP_); @export(&Bun__canonicalizeIP, .{ .name = "Bun__canonicalizeIP" }); } pub fn Bun__canonicalizeIP_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { @@ -2014,7 +2014,7 @@ pub fn Bun__canonicalizeIP_(globalThis: *JSC.JSGlobalObject, callframe: *JSC.Cal const INET6_ADDRSTRLEN = if (comptime bun.Environment.isWindows) 65 else 46; const script_ctx = globalThis.bunVM(); - var args = JSC.Node.ArgumentsSlice.init(script_ctx, arguments.slice()); + var args = JSC.CallFrame.ArgumentsSlice.init(script_ctx, arguments.slice()); const addr_arg = args.nextEat().?; const addr = try bun.String.fromJS(addr_arg, globalThis); diff --git a/src/deps/libuv.zig b/src/deps/libuv.zig index 3906c6e536..3d4cf4330a 100644 --- a/src/deps/libuv.zig +++ b/src/deps/libuv.zig @@ -675,7 +675,7 @@ pub const Loop = extern struct { return loop_alive; } - pub fn init(ptr: *Loop) ?bun.C.E { + pub fn init(ptr: *Loop) ?bun.sys.E { if (uv_loop_init(ptr).errEnum()) |err| return err; return null; } @@ -2716,78 +2716,78 @@ 
pub fn uv_is_closed(handle: *const uv_handle_t) bool { return (handle.flags & UV_HANDLE_CLOSED != 0); } -pub fn translateUVErrorToE(code_in: anytype) bun.C.E { +pub fn translateUVErrorToE(code_in: anytype) bun.sys.E { const code: c_int = @intCast(code_in); return switch (code) { - UV_EPERM => bun.C.E.PERM, - UV_ENOENT => bun.C.E.NOENT, - UV_ESRCH => bun.C.E.SRCH, - UV_EINTR => bun.C.E.INTR, - UV_EIO => bun.C.E.IO, - UV_ENXIO => bun.C.E.NXIO, - UV_E2BIG => bun.C.E.@"2BIG", - UV_EBADF => bun.C.E.BADF, - UV_EAGAIN => bun.C.E.AGAIN, - UV_ENOMEM => bun.C.E.NOMEM, - UV_EACCES => bun.C.E.ACCES, - UV_EFAULT => bun.C.E.FAULT, - UV_EBUSY => bun.C.E.BUSY, - UV_EEXIST => bun.C.E.EXIST, - UV_EXDEV => bun.C.E.XDEV, - UV_ENODEV => bun.C.E.NODEV, - UV_ENOTDIR => bun.C.E.NOTDIR, - UV_EISDIR => bun.C.E.ISDIR, - UV_EINVAL => bun.C.E.INVAL, - UV_ENFILE => bun.C.E.NFILE, - UV_EMFILE => bun.C.E.MFILE, - UV_ENOTTY => bun.C.E.NOTTY, - UV_ETXTBSY => bun.C.E.TXTBSY, - UV_EFBIG => bun.C.E.FBIG, - UV_ENOSPC => bun.C.E.NOSPC, - UV_ESPIPE => bun.C.E.SPIPE, - UV_EROFS => bun.C.E.ROFS, - UV_EMLINK => bun.C.E.MLINK, - UV_EPIPE => bun.C.E.PIPE, - UV_ERANGE => bun.C.E.RANGE, - UV_ENAMETOOLONG => bun.C.E.NAMETOOLONG, - UV_ENOSYS => bun.C.E.NOSYS, - UV_ENOTEMPTY => bun.C.E.NOTEMPTY, - UV_ELOOP => bun.C.E.LOOP, - UV_EUNATCH => bun.C.E.UNATCH, - UV_ENODATA => bun.C.E.NODATA, - UV_ENONET => bun.C.E.NONET, - UV_EPROTO => bun.C.E.PROTO, - UV_EOVERFLOW => bun.C.E.OVERFLOW, - UV_EILSEQ => bun.C.E.ILSEQ, - UV_ENOTSOCK => bun.C.E.NOTSOCK, - UV_EDESTADDRREQ => bun.C.E.DESTADDRREQ, - UV_EMSGSIZE => bun.C.E.MSGSIZE, - UV_EPROTOTYPE => bun.C.E.PROTOTYPE, - UV_ENOPROTOOPT => bun.C.E.NOPROTOOPT, - UV_EPROTONOSUPPORT => bun.C.E.PROTONOSUPPORT, - UV_ESOCKTNOSUPPORT => bun.C.E.SOCKTNOSUPPORT, - UV_ENOTSUP => bun.C.E.NOTSUP, - UV_EAFNOSUPPORT => bun.C.E.AFNOSUPPORT, - UV_EADDRINUSE => bun.C.E.ADDRINUSE, - UV_EADDRNOTAVAIL => bun.C.E.ADDRNOTAVAIL, - UV_ENETDOWN => bun.C.E.NETDOWN, - UV_ENETUNREACH => bun.C.E.NETUNREACH, - UV_ECONNABORTED => bun.C.E.CONNABORTED, - UV_ECONNRESET => bun.C.E.CONNRESET, - UV_ENOBUFS => bun.C.E.NOBUFS, - UV_EISCONN => bun.C.E.ISCONN, - UV_ENOTCONN => bun.C.E.NOTCONN, - UV_ESHUTDOWN => bun.C.E.SHUTDOWN, - UV_ETIMEDOUT => bun.C.E.TIMEDOUT, - UV_ECONNREFUSED => bun.C.E.CONNREFUSED, - UV_EHOSTDOWN => bun.C.E.HOSTDOWN, - UV_EHOSTUNREACH => bun.C.E.HOSTUNREACH, - UV_EALREADY => bun.C.E.ALREADY, - UV_EREMOTEIO => bun.C.E.REMOTEIO, - UV_ECANCELED => bun.C.E.CANCELED, - UV_ECHARSET => bun.C.E.CHARSET, - UV_EOF => bun.C.E.EOF, + UV_EPERM => bun.sys.E.PERM, + UV_ENOENT => bun.sys.E.NOENT, + UV_ESRCH => bun.sys.E.SRCH, + UV_EINTR => bun.sys.E.INTR, + UV_EIO => bun.sys.E.IO, + UV_ENXIO => bun.sys.E.NXIO, + UV_E2BIG => bun.sys.E.@"2BIG", + UV_EBADF => bun.sys.E.BADF, + UV_EAGAIN => bun.sys.E.AGAIN, + UV_ENOMEM => bun.sys.E.NOMEM, + UV_EACCES => bun.sys.E.ACCES, + UV_EFAULT => bun.sys.E.FAULT, + UV_EBUSY => bun.sys.E.BUSY, + UV_EEXIST => bun.sys.E.EXIST, + UV_EXDEV => bun.sys.E.XDEV, + UV_ENODEV => bun.sys.E.NODEV, + UV_ENOTDIR => bun.sys.E.NOTDIR, + UV_EISDIR => bun.sys.E.ISDIR, + UV_EINVAL => bun.sys.E.INVAL, + UV_ENFILE => bun.sys.E.NFILE, + UV_EMFILE => bun.sys.E.MFILE, + UV_ENOTTY => bun.sys.E.NOTTY, + UV_ETXTBSY => bun.sys.E.TXTBSY, + UV_EFBIG => bun.sys.E.FBIG, + UV_ENOSPC => bun.sys.E.NOSPC, + UV_ESPIPE => bun.sys.E.SPIPE, + UV_EROFS => bun.sys.E.ROFS, + UV_EMLINK => bun.sys.E.MLINK, + UV_EPIPE => bun.sys.E.PIPE, + UV_ERANGE => bun.sys.E.RANGE, + UV_ENAMETOOLONG => bun.sys.E.NAMETOOLONG, + UV_ENOSYS => bun.sys.E.NOSYS, + 
UV_ENOTEMPTY => bun.sys.E.NOTEMPTY, + UV_ELOOP => bun.sys.E.LOOP, + UV_EUNATCH => bun.sys.E.UNATCH, + UV_ENODATA => bun.sys.E.NODATA, + UV_ENONET => bun.sys.E.NONET, + UV_EPROTO => bun.sys.E.PROTO, + UV_EOVERFLOW => bun.sys.E.OVERFLOW, + UV_EILSEQ => bun.sys.E.ILSEQ, + UV_ENOTSOCK => bun.sys.E.NOTSOCK, + UV_EDESTADDRREQ => bun.sys.E.DESTADDRREQ, + UV_EMSGSIZE => bun.sys.E.MSGSIZE, + UV_EPROTOTYPE => bun.sys.E.PROTOTYPE, + UV_ENOPROTOOPT => bun.sys.E.NOPROTOOPT, + UV_EPROTONOSUPPORT => bun.sys.E.PROTONOSUPPORT, + UV_ESOCKTNOSUPPORT => bun.sys.E.SOCKTNOSUPPORT, + UV_ENOTSUP => bun.sys.E.NOTSUP, + UV_EAFNOSUPPORT => bun.sys.E.AFNOSUPPORT, + UV_EADDRINUSE => bun.sys.E.ADDRINUSE, + UV_EADDRNOTAVAIL => bun.sys.E.ADDRNOTAVAIL, + UV_ENETDOWN => bun.sys.E.NETDOWN, + UV_ENETUNREACH => bun.sys.E.NETUNREACH, + UV_ECONNABORTED => bun.sys.E.CONNABORTED, + UV_ECONNRESET => bun.sys.E.CONNRESET, + UV_ENOBUFS => bun.sys.E.NOBUFS, + UV_EISCONN => bun.sys.E.ISCONN, + UV_ENOTCONN => bun.sys.E.NOTCONN, + UV_ESHUTDOWN => bun.sys.E.SHUTDOWN, + UV_ETIMEDOUT => bun.sys.E.TIMEDOUT, + UV_ECONNREFUSED => bun.sys.E.CONNREFUSED, + UV_EHOSTDOWN => bun.sys.E.HOSTDOWN, + UV_EHOSTUNREACH => bun.sys.E.HOSTUNREACH, + UV_EALREADY => bun.sys.E.ALREADY, + UV_EREMOTEIO => bun.sys.E.REMOTEIO, + UV_ECANCELED => bun.sys.E.CANCELED, + UV_ECHARSET => bun.sys.E.CHARSET, + UV_EOF => bun.sys.E.EOF, else => @enumFromInt(-code), }; } @@ -2822,84 +2822,84 @@ pub const ReturnCode = enum(c_int) { return null; } - pub inline fn errno(this: ReturnCode) ?@TypeOf(@intFromEnum(bun.C.E.ACCES)) { + pub inline fn errno(this: ReturnCode) ?@TypeOf(@intFromEnum(bun.sys.E.ACCES)) { return if (this.int() < 0) switch (this.int()) { - UV_EPERM => @intFromEnum(bun.C.E.PERM), - UV_ENOENT => @intFromEnum(bun.C.E.NOENT), - UV_ESRCH => @intFromEnum(bun.C.E.SRCH), - UV_EINTR => @intFromEnum(bun.C.E.INTR), - UV_EIO => @intFromEnum(bun.C.E.IO), - UV_ENXIO => @intFromEnum(bun.C.E.NXIO), - UV_E2BIG => @intFromEnum(bun.C.E.@"2BIG"), - UV_EBADF => @intFromEnum(bun.C.E.BADF), - UV_EAGAIN => @intFromEnum(bun.C.E.AGAIN), - UV_ENOMEM => @intFromEnum(bun.C.E.NOMEM), - UV_EACCES => @intFromEnum(bun.C.E.ACCES), - UV_EFAULT => @intFromEnum(bun.C.E.FAULT), - UV_EBUSY => @intFromEnum(bun.C.E.BUSY), - UV_EEXIST => @intFromEnum(bun.C.E.EXIST), - UV_EXDEV => @intFromEnum(bun.C.E.XDEV), - UV_ENODEV => @intFromEnum(bun.C.E.NODEV), - UV_ENOTDIR => @intFromEnum(bun.C.E.NOTDIR), - UV_EISDIR => @intFromEnum(bun.C.E.ISDIR), - UV_EINVAL => @intFromEnum(bun.C.E.INVAL), - UV_ENFILE => @intFromEnum(bun.C.E.NFILE), - UV_EMFILE => @intFromEnum(bun.C.E.MFILE), - UV_ENOTTY => @intFromEnum(bun.C.E.NOTTY), - UV_ETXTBSY => @intFromEnum(bun.C.E.TXTBSY), - UV_EFBIG => @intFromEnum(bun.C.E.FBIG), - UV_ENOSPC => @intFromEnum(bun.C.E.NOSPC), - UV_ESPIPE => @intFromEnum(bun.C.E.SPIPE), - UV_EROFS => @intFromEnum(bun.C.E.ROFS), - UV_EMLINK => @intFromEnum(bun.C.E.MLINK), - UV_EPIPE => @intFromEnum(bun.C.E.PIPE), - UV_ERANGE => @intFromEnum(bun.C.E.RANGE), - UV_ENAMETOOLONG => @intFromEnum(bun.C.E.NAMETOOLONG), - UV_ENOSYS => @intFromEnum(bun.C.E.NOSYS), - UV_ENOTEMPTY => @intFromEnum(bun.C.E.NOTEMPTY), - UV_ELOOP => @intFromEnum(bun.C.E.LOOP), - UV_EUNATCH => @intFromEnum(bun.C.E.UNATCH), - UV_ENODATA => @intFromEnum(bun.C.E.NODATA), - UV_ENONET => @intFromEnum(bun.C.E.NONET), - UV_EPROTO => @intFromEnum(bun.C.E.PROTO), - UV_EOVERFLOW => @intFromEnum(bun.C.E.OVERFLOW), - UV_EILSEQ => @intFromEnum(bun.C.E.ILSEQ), - UV_ENOTSOCK => @intFromEnum(bun.C.E.NOTSOCK), - UV_EDESTADDRREQ => 
@intFromEnum(bun.C.E.DESTADDRREQ), - UV_EMSGSIZE => @intFromEnum(bun.C.E.MSGSIZE), - UV_EPROTOTYPE => @intFromEnum(bun.C.E.PROTOTYPE), - UV_ENOPROTOOPT => @intFromEnum(bun.C.E.NOPROTOOPT), - UV_EPROTONOSUPPORT => @intFromEnum(bun.C.E.PROTONOSUPPORT), - UV_ESOCKTNOSUPPORT => @intFromEnum(bun.C.E.SOCKTNOSUPPORT), - UV_ENOTSUP => @intFromEnum(bun.C.E.NOTSUP), - UV_EAFNOSUPPORT => @intFromEnum(bun.C.E.AFNOSUPPORT), - UV_EADDRINUSE => @intFromEnum(bun.C.E.ADDRINUSE), - UV_EADDRNOTAVAIL => @intFromEnum(bun.C.E.ADDRNOTAVAIL), - UV_ENETDOWN => @intFromEnum(bun.C.E.NETDOWN), - UV_ENETUNREACH => @intFromEnum(bun.C.E.NETUNREACH), - UV_ECONNABORTED => @intFromEnum(bun.C.E.CONNABORTED), - UV_ECONNRESET => @intFromEnum(bun.C.E.CONNRESET), - UV_ENOBUFS => @intFromEnum(bun.C.E.NOBUFS), - UV_EISCONN => @intFromEnum(bun.C.E.ISCONN), - UV_ENOTCONN => @intFromEnum(bun.C.E.NOTCONN), - UV_ESHUTDOWN => @intFromEnum(bun.C.E.SHUTDOWN), - UV_ETIMEDOUT => @intFromEnum(bun.C.E.TIMEDOUT), - UV_ECONNREFUSED => @intFromEnum(bun.C.E.CONNREFUSED), - UV_EHOSTDOWN => @intFromEnum(bun.C.E.HOSTDOWN), - UV_EHOSTUNREACH => @intFromEnum(bun.C.E.HOSTUNREACH), - UV_EALREADY => @intFromEnum(bun.C.E.ALREADY), - UV_EREMOTEIO => @intFromEnum(bun.C.E.REMOTEIO), - UV_ECANCELED => @intFromEnum(bun.C.E.CANCELED), - UV_ECHARSET => @intFromEnum(bun.C.E.CHARSET), - UV_EOF => @intFromEnum(bun.C.E.EOF), + UV_EPERM => @intFromEnum(bun.sys.E.PERM), + UV_ENOENT => @intFromEnum(bun.sys.E.NOENT), + UV_ESRCH => @intFromEnum(bun.sys.E.SRCH), + UV_EINTR => @intFromEnum(bun.sys.E.INTR), + UV_EIO => @intFromEnum(bun.sys.E.IO), + UV_ENXIO => @intFromEnum(bun.sys.E.NXIO), + UV_E2BIG => @intFromEnum(bun.sys.E.@"2BIG"), + UV_EBADF => @intFromEnum(bun.sys.E.BADF), + UV_EAGAIN => @intFromEnum(bun.sys.E.AGAIN), + UV_ENOMEM => @intFromEnum(bun.sys.E.NOMEM), + UV_EACCES => @intFromEnum(bun.sys.E.ACCES), + UV_EFAULT => @intFromEnum(bun.sys.E.FAULT), + UV_EBUSY => @intFromEnum(bun.sys.E.BUSY), + UV_EEXIST => @intFromEnum(bun.sys.E.EXIST), + UV_EXDEV => @intFromEnum(bun.sys.E.XDEV), + UV_ENODEV => @intFromEnum(bun.sys.E.NODEV), + UV_ENOTDIR => @intFromEnum(bun.sys.E.NOTDIR), + UV_EISDIR => @intFromEnum(bun.sys.E.ISDIR), + UV_EINVAL => @intFromEnum(bun.sys.E.INVAL), + UV_ENFILE => @intFromEnum(bun.sys.E.NFILE), + UV_EMFILE => @intFromEnum(bun.sys.E.MFILE), + UV_ENOTTY => @intFromEnum(bun.sys.E.NOTTY), + UV_ETXTBSY => @intFromEnum(bun.sys.E.TXTBSY), + UV_EFBIG => @intFromEnum(bun.sys.E.FBIG), + UV_ENOSPC => @intFromEnum(bun.sys.E.NOSPC), + UV_ESPIPE => @intFromEnum(bun.sys.E.SPIPE), + UV_EROFS => @intFromEnum(bun.sys.E.ROFS), + UV_EMLINK => @intFromEnum(bun.sys.E.MLINK), + UV_EPIPE => @intFromEnum(bun.sys.E.PIPE), + UV_ERANGE => @intFromEnum(bun.sys.E.RANGE), + UV_ENAMETOOLONG => @intFromEnum(bun.sys.E.NAMETOOLONG), + UV_ENOSYS => @intFromEnum(bun.sys.E.NOSYS), + UV_ENOTEMPTY => @intFromEnum(bun.sys.E.NOTEMPTY), + UV_ELOOP => @intFromEnum(bun.sys.E.LOOP), + UV_EUNATCH => @intFromEnum(bun.sys.E.UNATCH), + UV_ENODATA => @intFromEnum(bun.sys.E.NODATA), + UV_ENONET => @intFromEnum(bun.sys.E.NONET), + UV_EPROTO => @intFromEnum(bun.sys.E.PROTO), + UV_EOVERFLOW => @intFromEnum(bun.sys.E.OVERFLOW), + UV_EILSEQ => @intFromEnum(bun.sys.E.ILSEQ), + UV_ENOTSOCK => @intFromEnum(bun.sys.E.NOTSOCK), + UV_EDESTADDRREQ => @intFromEnum(bun.sys.E.DESTADDRREQ), + UV_EMSGSIZE => @intFromEnum(bun.sys.E.MSGSIZE), + UV_EPROTOTYPE => @intFromEnum(bun.sys.E.PROTOTYPE), + UV_ENOPROTOOPT => @intFromEnum(bun.sys.E.NOPROTOOPT), + UV_EPROTONOSUPPORT => @intFromEnum(bun.sys.E.PROTONOSUPPORT), + 
UV_ESOCKTNOSUPPORT => @intFromEnum(bun.sys.E.SOCKTNOSUPPORT), + UV_ENOTSUP => @intFromEnum(bun.sys.E.NOTSUP), + UV_EAFNOSUPPORT => @intFromEnum(bun.sys.E.AFNOSUPPORT), + UV_EADDRINUSE => @intFromEnum(bun.sys.E.ADDRINUSE), + UV_EADDRNOTAVAIL => @intFromEnum(bun.sys.E.ADDRNOTAVAIL), + UV_ENETDOWN => @intFromEnum(bun.sys.E.NETDOWN), + UV_ENETUNREACH => @intFromEnum(bun.sys.E.NETUNREACH), + UV_ECONNABORTED => @intFromEnum(bun.sys.E.CONNABORTED), + UV_ECONNRESET => @intFromEnum(bun.sys.E.CONNRESET), + UV_ENOBUFS => @intFromEnum(bun.sys.E.NOBUFS), + UV_EISCONN => @intFromEnum(bun.sys.E.ISCONN), + UV_ENOTCONN => @intFromEnum(bun.sys.E.NOTCONN), + UV_ESHUTDOWN => @intFromEnum(bun.sys.E.SHUTDOWN), + UV_ETIMEDOUT => @intFromEnum(bun.sys.E.TIMEDOUT), + UV_ECONNREFUSED => @intFromEnum(bun.sys.E.CONNREFUSED), + UV_EHOSTDOWN => @intFromEnum(bun.sys.E.HOSTDOWN), + UV_EHOSTUNREACH => @intFromEnum(bun.sys.E.HOSTUNREACH), + UV_EALREADY => @intFromEnum(bun.sys.E.ALREADY), + UV_EREMOTEIO => @intFromEnum(bun.sys.E.REMOTEIO), + UV_ECANCELED => @intFromEnum(bun.sys.E.CANCELED), + UV_ECHARSET => @intFromEnum(bun.sys.E.CHARSET), + UV_EOF => @intFromEnum(bun.sys.E.EOF), else => null, } else null; } - pub inline fn errEnum(this: ReturnCode) ?bun.C.E { + pub inline fn errEnum(this: ReturnCode) ?bun.sys.E { return if (this.int() < 0) (translateUVErrorToE(this.int())) else @@ -2937,14 +2937,14 @@ pub const ReturnCodeI64 = enum(i64) { return null; } - pub inline fn errno(this: ReturnCodeI64) ?@TypeOf(@intFromEnum(bun.C.E.ACCES)) { + pub inline fn errno(this: ReturnCodeI64) ?@TypeOf(@intFromEnum(bun.sys.E.ACCES)) { return if (@intFromEnum(this) < 0) @as(u16, @intCast(-@intFromEnum(this))) else null; } - pub inline fn errEnum(this: ReturnCodeI64) ?bun.C.E { + pub inline fn errEnum(this: ReturnCodeI64) ?bun.sys.E { return if (@intFromEnum(this) < 0) (translateUVErrorToE(@intFromEnum(this))) else @@ -2993,7 +2993,7 @@ fn StreamMixin(comptime Type: type) type { this: *Type, context: anytype, comptime alloc_cb: *const (fn (@TypeOf(context), suggested_size: usize) []u8), - comptime error_cb: *const (fn (@TypeOf(context), err: bun.C.E) void), + comptime error_cb: *const (fn (@TypeOf(context), err: bun.sys.E) void), comptime read_cb: *const (fn (@TypeOf(context), data: []const u8) void), ) Maybe(void) { const Context = @TypeOf(context); @@ -3008,7 +3008,7 @@ fn StreamMixin(comptime Type: type) type { if (nreads == 0) return; // EAGAIN or EWOULDBLOCK if (nreads < 0) { req.readStop(); - error_cb(context_data, ReturnCodeI64.init(nreads).errEnum() orelse bun.C.E.CANCELED); + error_cb(context_data, ReturnCodeI64.init(nreads).errEnum() orelse bun.sys.E.CANCELED); } else { read_cb(context_data, buffer.slice()); } diff --git a/src/deps/uws.zig b/src/deps/uws.zig index c2bc4fe8c1..0885acc79e 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -170,7 +170,7 @@ pub const UpgradedDuplex = struct { const globalThis = this.global.?; const writeOrEnd = if (msg_more) duplex.getFunction(globalThis, "write") catch return orelse return else duplex.getFunction(globalThis, "end") catch return orelse return; if (data) |data_| { - const buffer = JSC.BinaryType.toJS(.Buffer, data_, globalThis); + const buffer = JSC.ArrayBuffer.BinaryType.toJS(.Buffer, data_, globalThis); buffer.ensureStillAlive(); _ = writeOrEnd.call(globalThis, duplex, &.{buffer}) catch |err| { @@ -217,7 +217,7 @@ pub const UpgradedDuplex = struct { const function = callframe.callee(); const args = callframe.arguments_old(1); - if (JSC.getFunctionData(function)) |self| { + if 
(JSC.host_fn.getFunctionData(function)) |self| { const this = @as(*UpgradedDuplex, @ptrCast(@alignCast(self))); if (args.len >= 1) { const data_arg = args.ptr[0]; @@ -249,7 +249,7 @@ pub const UpgradedDuplex = struct { _ = globalObject; const function = callframe.callee(); - if (JSC.getFunctionData(function)) |self| { + if (JSC.host_fn.getFunctionData(function)) |self| { const this = @as(*UpgradedDuplex, @ptrCast(@alignCast(self))); if (this.wrapper != null) { @@ -267,7 +267,7 @@ pub const UpgradedDuplex = struct { _ = globalObject; const function = callframe.callee(); - if (JSC.getFunctionData(function)) |self| { + if (JSC.host_fn.getFunctionData(function)) |self| { const this = @as(*UpgradedDuplex, @ptrCast(@alignCast(self))); // flush pending data if (this.wrapper) |*wrapper| { @@ -289,7 +289,7 @@ pub const UpgradedDuplex = struct { _ = globalObject; const function = callframe.callee(); - if (JSC.getFunctionData(function)) |self| { + if (JSC.host_fn.getFunctionData(function)) |self| { const this = @as(*UpgradedDuplex, @ptrCast(@alignCast(self))); // flush pending data if (this.wrapper) |*wrapper| { @@ -337,7 +337,7 @@ pub const UpgradedDuplex = struct { { const callback = this.onDataCallback.get() orelse brk: { - const dataCallback = JSC.NewFunctionWithData( + const dataCallback = JSC.host_fn.NewFunctionWithData( globalThis, null, 0, @@ -347,7 +347,7 @@ pub const UpgradedDuplex = struct { ); dataCallback.ensureStillAlive(); - JSC.setFunctionData(dataCallback, this); + JSC.host_fn.setFunctionData(dataCallback, this); this.onDataCallback = JSC.Strong.create(dataCallback, globalThis); break :brk dataCallback; @@ -357,7 +357,7 @@ pub const UpgradedDuplex = struct { { const callback = this.onEndCallback.get() orelse brk: { - const endCallback = JSC.NewFunctionWithData( + const endCallback = JSC.host_fn.NewFunctionWithData( globalThis, null, 0, @@ -367,7 +367,7 @@ pub const UpgradedDuplex = struct { ); endCallback.ensureStillAlive(); - JSC.setFunctionData(endCallback, this); + JSC.host_fn.setFunctionData(endCallback, this); this.onEndCallback = JSC.Strong.create(endCallback, globalThis); break :brk endCallback; @@ -377,7 +377,7 @@ pub const UpgradedDuplex = struct { { const callback = this.onWritableCallback.get() orelse brk: { - const writableCallback = JSC.NewFunctionWithData( + const writableCallback = JSC.host_fn.NewFunctionWithData( globalThis, null, 0, @@ -387,7 +387,7 @@ pub const UpgradedDuplex = struct { ); writableCallback.ensureStillAlive(); - JSC.setFunctionData(writableCallback, this); + JSC.host_fn.setFunctionData(writableCallback, this); this.onWritableCallback = JSC.Strong.create(writableCallback, globalThis); break :brk writableCallback; }; @@ -396,7 +396,7 @@ pub const UpgradedDuplex = struct { { const callback = this.onCloseCallback.get() orelse brk: { - const closeCallback = JSC.NewFunctionWithData( + const closeCallback = JSC.host_fn.NewFunctionWithData( globalThis, null, 0, @@ -406,7 +406,7 @@ pub const UpgradedDuplex = struct { ); closeCallback.ensureStillAlive(); - JSC.setFunctionData(closeCallback, this); + JSC.host_fn.setFunctionData(closeCallback, this); this.onCloseCallback = JSC.Strong.create(closeCallback, globalThis); break :brk closeCallback; }; @@ -528,19 +528,19 @@ pub const UpgradedDuplex = struct { this.origin.deinit(); if (this.onDataCallback.get()) |callback| { - JSC.setFunctionData(callback, null); + JSC.host_fn.setFunctionData(callback, null); this.onDataCallback.deinit(); } if (this.onEndCallback.get()) |callback| { - JSC.setFunctionData(callback, 
null); + JSC.host_fn.setFunctionData(callback, null); this.onEndCallback.deinit(); } if (this.onWritableCallback.get()) |callback| { - JSC.setFunctionData(callback, null); + JSC.host_fn.setFunctionData(callback, null); this.onWritableCallback.deinit(); } if (this.onCloseCallback.get()) |callback| { - JSC.setFunctionData(callback, null); + JSC.host_fn.setFunctionData(callback, null); this.onCloseCallback.deinit(); } var ssl_error = this.ssl_error; @@ -660,7 +660,7 @@ pub const WindowsNamedPipe = if (Environment.isWindows) struct { } } - fn onReadError(this: *WindowsNamedPipe, err: bun.C.E) void { + fn onReadError(this: *WindowsNamedPipe, err: bun.sys.E) void { log("onReadError", .{}); if (err == .EOF) { // we received FIN but we dont allow half-closed connections right now @@ -851,7 +851,7 @@ pub const WindowsNamedPipe = if (Environment.isWindows) struct { }) catch { return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.PIPE), + .errno = @intFromEnum(bun.sys.E.PIPE), .syscall = .connect, }, }; @@ -899,7 +899,7 @@ pub const WindowsNamedPipe = if (Environment.isWindows) struct { }) catch { return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.PIPE), + .errno = @intFromEnum(bun.sys.E.PIPE), .syscall = .connect, }, }; @@ -937,7 +937,7 @@ pub const WindowsNamedPipe = if (Environment.isWindows) struct { }) catch { return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.PIPE), + .errno = @intFromEnum(bun.sys.E.PIPE), .syscall = .connect, }, }; @@ -980,7 +980,7 @@ pub const WindowsNamedPipe = if (Environment.isWindows) struct { return false; } const stream = this.writer.getStream() orelse { - this.onError(bun.sys.Error.fromCode(bun.C.E.PIPE, .read)); + this.onError(bun.sys.Error.fromCode(bun.sys.E.PIPE, .read)); return false; }; diff --git a/src/deps/uws/socket.zig b/src/deps/uws/socket.zig index cda69d45c1..49410e1558 100644 --- a/src/deps/uws/socket.zig +++ b/src/deps/uws/socket.zig @@ -72,7 +72,7 @@ pub const Socket = opaque { us_socket_local_address(@intFromBool(ssl), this, buf.ptr, &length); if (length < 0) { - const errno = bun.C.getErrno(length); + const errno = bun.sys.getErrno(length); bun.debugAssert(errno != .SUCCESS); return bun.errnoToZigErr(errno); } @@ -87,7 +87,7 @@ pub const Socket = opaque { us_socket_remote_address(@intFromBool(ssl), this, buf.ptr, &length); if (length < 0) { - const errno = bun.C.getErrno(length); + const errno = bun.sys.getErrno(length); bun.debugAssert(errno != .SUCCESS); return bun.errnoToZigErr(errno); } diff --git a/src/dns.zig b/src/dns.zig index dc8cc51606..997137de21 100644 --- a/src/dns.zig +++ b/src/dns.zig @@ -470,4 +470,4 @@ pub fn addrInfoToJSArray( return array; } -pub const internal = bun.JSC.DNS.InternalDNS; +pub const internal = bun.api.DNS.InternalDNS; diff --git a/src/env_loader.zig b/src/env_loader.zig index 47453b9c88..5b0261fb6b 100644 --- a/src/env_loader.zig +++ b/src/env_loader.zig @@ -10,7 +10,7 @@ const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const CodePoint = bun.CodePoint; -const C = bun.C; + const CodepointIterator = @import("./string_immutable.zig").CodepointIterator; const Analytics = @import("./analytics/analytics_thread.zig"); const Fs = @import("./fs.zig"); diff --git a/src/errno/darwin_errno.zig b/src/errno/darwin_errno.zig new file mode 100644 index 0000000000..9b59aaf251 --- /dev/null +++ b/src/errno/darwin_errno.zig @@ -0,0 +1,205 @@ +pub const Mode = std.posix.mode_t; +pub const E = std.posix.E; +pub const S = std.posix.S; + +pub const SystemErrno = enum(u8) 
{ + SUCCESS = 0, + EPERM = 1, + ENOENT = 2, + ESRCH = 3, + EINTR = 4, + EIO = 5, + ENXIO = 6, + E2BIG = 7, + ENOEXEC = 8, + EBADF = 9, + ECHILD = 10, + EDEADLK = 11, + ENOMEM = 12, + EACCES = 13, + EFAULT = 14, + ENOTBLK = 15, + EBUSY = 16, + EEXIST = 17, + EXDEV = 18, + ENODEV = 19, + ENOTDIR = 20, + EISDIR = 21, + EINVAL = 22, + ENFILE = 23, + EMFILE = 24, + ENOTTY = 25, + ETXTBSY = 26, + EFBIG = 27, + ENOSPC = 28, + ESPIPE = 29, + EROFS = 30, + EMLINK = 31, + EPIPE = 32, + EDOM = 33, + ERANGE = 34, + EAGAIN = 35, + EINPROGRESS = 36, + EALREADY = 37, + ENOTSOCK = 38, + EDESTADDRREQ = 39, + EMSGSIZE = 40, + EPROTOTYPE = 41, + ENOPROTOOPT = 42, + EPROTONOSUPPORT = 43, + ESOCKTNOSUPPORT = 44, + ENOTSUP = 45, + EPFNOSUPPORT = 46, + EAFNOSUPPORT = 47, + EADDRINUSE = 48, + EADDRNOTAVAIL = 49, + ENETDOWN = 50, + ENETUNREACH = 51, + ENETRESET = 52, + ECONNABORTED = 53, + ECONNRESET = 54, + ENOBUFS = 55, + EISCONN = 56, + ENOTCONN = 57, + ESHUTDOWN = 58, + ETOOMANYREFS = 59, + ETIMEDOUT = 60, + ECONNREFUSED = 61, + ELOOP = 62, + ENAMETOOLONG = 63, + EHOSTDOWN = 64, + EHOSTUNREACH = 65, + ENOTEMPTY = 66, + EPROCLIM = 67, + EUSERS = 68, + EDQUOT = 69, + ESTALE = 70, + EREMOTE = 71, + EBADRPC = 72, + ERPCMISMATCH = 73, + EPROGUNAVAIL = 74, + EPROGMISMATCH = 75, + EPROCUNAVAIL = 76, + ENOLCK = 77, + ENOSYS = 78, + EFTYPE = 79, + EAUTH = 80, + ENEEDAUTH = 81, + EPWROFF = 82, + EDEVERR = 83, + EOVERFLOW = 84, + EBADEXEC = 85, + EBADARCH = 86, + ESHLIBVERS = 87, + EBADMACHO = 88, + ECANCELED = 89, + EIDRM = 90, + ENOMSG = 91, + EILSEQ = 92, + ENOATTR = 93, + EBADMSG = 94, + EMULTIHOP = 95, + ENODATA = 96, + ENOLINK = 97, + ENOSR = 98, + ENOSTR = 99, + EPROTO = 100, + ETIME = 101, + EOPNOTSUPP = 102, + ENOPOLICY = 103, + ENOTRECOVERABLE = 104, + EOWNERDEAD = 105, + EQFULL = 106, + + pub const max = 107; + + pub fn init(code: anytype) ?SystemErrno { + if (code < 0) { + if (code <= -max) { + return null; + } + return @enumFromInt(-code); + } + if (code >= max) return null; + return @enumFromInt(code); + } +}; +pub const UV_E = struct { + pub const @"2BIG": i32 = @intFromEnum(SystemErrno.E2BIG); + pub const ACCES: i32 = @intFromEnum(SystemErrno.EACCES); + pub const ADDRINUSE: i32 = @intFromEnum(SystemErrno.EADDRINUSE); + pub const ADDRNOTAVAIL: i32 = @intFromEnum(SystemErrno.EADDRNOTAVAIL); + pub const AFNOSUPPORT: i32 = @intFromEnum(SystemErrno.EAFNOSUPPORT); + pub const AGAIN: i32 = @intFromEnum(SystemErrno.EAGAIN); + pub const ALREADY: i32 = @intFromEnum(SystemErrno.EALREADY); + pub const BADF: i32 = @intFromEnum(SystemErrno.EBADF); + pub const BUSY: i32 = @intFromEnum(SystemErrno.EBUSY); + pub const CANCELED: i32 = @intFromEnum(SystemErrno.ECANCELED); + pub const CHARSET: i32 = -bun.windows.libuv.UV__ECHARSET; + pub const CONNABORTED: i32 = @intFromEnum(SystemErrno.ECONNABORTED); + pub const CONNREFUSED: i32 = @intFromEnum(SystemErrno.ECONNREFUSED); + pub const CONNRESET: i32 = @intFromEnum(SystemErrno.ECONNRESET); + pub const DESTADDRREQ: i32 = @intFromEnum(SystemErrno.EDESTADDRREQ); + pub const EXIST: i32 = @intFromEnum(SystemErrno.EEXIST); + pub const FAULT: i32 = @intFromEnum(SystemErrno.EFAULT); + pub const HOSTUNREACH: i32 = @intFromEnum(SystemErrno.EHOSTUNREACH); + pub const INTR: i32 = @intFromEnum(SystemErrno.EINTR); + pub const INVAL: i32 = @intFromEnum(SystemErrno.EINVAL); + pub const IO: i32 = @intFromEnum(SystemErrno.EIO); + pub const ISCONN: i32 = @intFromEnum(SystemErrno.EISCONN); + pub const ISDIR: i32 = @intFromEnum(SystemErrno.EISDIR); + pub const LOOP: i32 = 
@intFromEnum(SystemErrno.ELOOP); + pub const MFILE: i32 = @intFromEnum(SystemErrno.EMFILE); + pub const MSGSIZE: i32 = @intFromEnum(SystemErrno.EMSGSIZE); + pub const NAMETOOLONG: i32 = @intFromEnum(SystemErrno.ENAMETOOLONG); + pub const NETDOWN: i32 = @intFromEnum(SystemErrno.ENETDOWN); + pub const NETUNREACH: i32 = @intFromEnum(SystemErrno.ENETUNREACH); + pub const NFILE: i32 = @intFromEnum(SystemErrno.ENFILE); + pub const NOBUFS: i32 = @intFromEnum(SystemErrno.ENOBUFS); + pub const NODEV: i32 = @intFromEnum(SystemErrno.ENODEV); + pub const NOENT: i32 = @intFromEnum(SystemErrno.ENOENT); + pub const NOMEM: i32 = @intFromEnum(SystemErrno.ENOMEM); + pub const NONET: i32 = -bun.windows.libuv.UV_ENONET; + pub const NOSPC: i32 = @intFromEnum(SystemErrno.ENOSPC); + pub const NOSYS: i32 = @intFromEnum(SystemErrno.ENOSYS); + pub const NOTCONN: i32 = @intFromEnum(SystemErrno.ENOTCONN); + pub const NOTDIR: i32 = @intFromEnum(SystemErrno.ENOTDIR); + pub const NOTEMPTY: i32 = @intFromEnum(SystemErrno.ENOTEMPTY); + pub const NOTSOCK: i32 = @intFromEnum(SystemErrno.ENOTSOCK); + pub const NOTSUP: i32 = @intFromEnum(SystemErrno.ENOTSUP); + pub const PERM: i32 = @intFromEnum(SystemErrno.EPERM); + pub const PIPE: i32 = @intFromEnum(SystemErrno.EPIPE); + pub const PROTO: i32 = @intFromEnum(SystemErrno.EPROTO); + pub const PROTONOSUPPORT: i32 = @intFromEnum(SystemErrno.EPROTONOSUPPORT); + pub const PROTOTYPE: i32 = @intFromEnum(SystemErrno.EPROTOTYPE); + pub const ROFS: i32 = @intFromEnum(SystemErrno.EROFS); + pub const SHUTDOWN: i32 = @intFromEnum(SystemErrno.ESHUTDOWN); + pub const SPIPE: i32 = @intFromEnum(SystemErrno.ESPIPE); + pub const SRCH: i32 = @intFromEnum(SystemErrno.ESRCH); + pub const TIMEDOUT: i32 = @intFromEnum(SystemErrno.ETIMEDOUT); + pub const TXTBSY: i32 = @intFromEnum(SystemErrno.ETXTBSY); + pub const XDEV: i32 = @intFromEnum(SystemErrno.EXDEV); + pub const FBIG: i32 = @intFromEnum(SystemErrno.EFBIG); + pub const NOPROTOOPT: i32 = @intFromEnum(SystemErrno.ENOPROTOOPT); + pub const RANGE: i32 = @intFromEnum(SystemErrno.ERANGE); + pub const NXIO: i32 = @intFromEnum(SystemErrno.ENXIO); + pub const MLINK: i32 = @intFromEnum(SystemErrno.EMLINK); + pub const HOSTDOWN: i32 = @intFromEnum(SystemErrno.EHOSTDOWN); + pub const REMOTEIO: i32 = -bun.windows.libuv.UV_EREMOTEIO; + pub const NOTTY: i32 = @intFromEnum(SystemErrno.ENOTTY); + pub const FTYPE: i32 = @intFromEnum(SystemErrno.EFTYPE); + pub const ILSEQ: i32 = @intFromEnum(SystemErrno.EILSEQ); + pub const OVERFLOW: i32 = @intFromEnum(SystemErrno.EOVERFLOW); + pub const SOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT); + pub const NODATA: i32 = @intFromEnum(SystemErrno.ENODATA); + pub const UNATCH: i32 = -bun.windows.libuv.UV_EUNATCH; +}; +pub fn getErrno(rc: anytype) E { + if (rc == -1) { + return @enumFromInt(std.c._errno().*); + } else { + return .SUCCESS; + } +} +const std = @import("std"); +const bun = @import("bun"); diff --git a/src/errno/linux_errno.zig b/src/errno/linux_errno.zig new file mode 100644 index 0000000000..e1bdccdd4b --- /dev/null +++ b/src/errno/linux_errno.zig @@ -0,0 +1,251 @@ +pub const Mode = std.posix.mode_t; +pub const E = std.posix.E; +pub const S = std.posix.S; +pub const SystemErrno = enum(u8) { + SUCCESS = 0, + EPERM = 1, + ENOENT = 2, + ESRCH = 3, + EINTR = 4, + EIO = 5, + ENXIO = 6, + E2BIG = 7, + ENOEXEC = 8, + EBADF = 9, + ECHILD = 10, + EAGAIN = 11, + ENOMEM = 12, + EACCES = 13, + EFAULT = 14, + ENOTBLK = 15, + EBUSY = 16, + EEXIST = 17, + EXDEV = 18, + ENODEV = 19, + ENOTDIR = 20, + EISDIR = 
21, + EINVAL = 22, + ENFILE = 23, + EMFILE = 24, + ENOTTY = 25, + ETXTBSY = 26, + EFBIG = 27, + ENOSPC = 28, + ESPIPE = 29, + EROFS = 30, + EMLINK = 31, + EPIPE = 32, + EDOM = 33, + ERANGE = 34, + EDEADLK = 35, + ENAMETOOLONG = 36, + ENOLCK = 37, + ENOSYS = 38, + ENOTEMPTY = 39, + ELOOP = 40, + EWOULDBLOCK = 41, + ENOMSG = 42, + EIDRM = 43, + ECHRNG = 44, + EL2NSYNC = 45, + EL3HLT = 46, + EL3RST = 47, + ELNRNG = 48, + EUNATCH = 49, + ENOCSI = 50, + EL2HLT = 51, + EBADE = 52, + EBADR = 53, + EXFULL = 54, + ENOANO = 55, + EBADRQC = 56, + EBADSLT = 57, + EDEADLOCK = 58, + EBFONT = 59, + ENOSTR = 60, + ENODATA = 61, + ETIME = 62, + ENOSR = 63, + ENONET = 64, + ENOPKG = 65, + EREMOTE = 66, + ENOLINK = 67, + EADV = 68, + ESRMNT = 69, + ECOMM = 70, + EPROTO = 71, + EMULTIHOP = 72, + EDOTDOT = 73, + EBADMSG = 74, + EOVERFLOW = 75, + ENOTUNIQ = 76, + EBADFD = 77, + EREMCHG = 78, + ELIBACC = 79, + ELIBBAD = 80, + ELIBSCN = 81, + ELIBMAX = 82, + ELIBEXEC = 83, + EILSEQ = 84, + ERESTART = 85, + ESTRPIPE = 86, + EUSERS = 87, + ENOTSOCK = 88, + EDESTADDRREQ = 89, + EMSGSIZE = 90, + EPROTOTYPE = 91, + ENOPROTOOPT = 92, + EPROTONOSUPPORT = 93, + ESOCKTNOSUPPORT = 94, + /// For Linux, EOPNOTSUPP is the real value + /// but it's ~the same and is incompatible across operating systems + /// https://lists.gnu.org/archive/html/bug-glibc/2002-08/msg00017.html + ENOTSUP = 95, + EPFNOSUPPORT = 96, + EAFNOSUPPORT = 97, + EADDRINUSE = 98, + EADDRNOTAVAIL = 99, + ENETDOWN = 100, + ENETUNREACH = 101, + ENETRESET = 102, + ECONNABORTED = 103, + ECONNRESET = 104, + ENOBUFS = 105, + EISCONN = 106, + ENOTCONN = 107, + ESHUTDOWN = 108, + ETOOMANYREFS = 109, + ETIMEDOUT = 110, + ECONNREFUSED = 111, + EHOSTDOWN = 112, + EHOSTUNREACH = 113, + EALREADY = 114, + EINPROGRESS = 115, + ESTALE = 116, + EUCLEAN = 117, + ENOTNAM = 118, + ENAVAIL = 119, + EISNAM = 120, + EREMOTEIO = 121, + EDQUOT = 122, + ENOMEDIUM = 123, + EMEDIUMTYPE = 124, + ECANCELED = 125, + ENOKEY = 126, + EKEYEXPIRED = 127, + EKEYREVOKED = 128, + EKEYREJECTED = 129, + EOWNERDEAD = 130, + ENOTRECOVERABLE = 131, + ERFKILL = 132, + EHWPOISON = 133, + + pub const max = 134; + + pub fn init(code: anytype) ?SystemErrno { + if (code < 0) { + if (code <= -max) { + return null; + } + return @enumFromInt(-code); + } + if (code >= max) return null; + return @enumFromInt(code); + } +}; +pub const UV_E = struct { + pub const @"2BIG": i32 = @intFromEnum(SystemErrno.E2BIG); + pub const ACCES: i32 = @intFromEnum(SystemErrno.EACCES); + pub const ADDRINUSE: i32 = @intFromEnum(SystemErrno.EADDRINUSE); + pub const ADDRNOTAVAIL: i32 = @intFromEnum(SystemErrno.EADDRNOTAVAIL); + pub const AFNOSUPPORT: i32 = @intFromEnum(SystemErrno.EAFNOSUPPORT); + pub const AGAIN: i32 = @intFromEnum(SystemErrno.EAGAIN); + pub const ALREADY: i32 = @intFromEnum(SystemErrno.EALREADY); + pub const BADF: i32 = @intFromEnum(SystemErrno.EBADF); + pub const BUSY: i32 = @intFromEnum(SystemErrno.EBUSY); + pub const CANCELED: i32 = @intFromEnum(SystemErrno.ECANCELED); + pub const CHARSET: i32 = -bun.windows.libuv.UV_ECHARSET; + pub const CONNABORTED: i32 = @intFromEnum(SystemErrno.ECONNABORTED); + pub const CONNREFUSED: i32 = @intFromEnum(SystemErrno.ECONNREFUSED); + pub const CONNRESET: i32 = @intFromEnum(SystemErrno.ECONNRESET); + pub const DESTADDRREQ: i32 = @intFromEnum(SystemErrno.EDESTADDRREQ); + pub const EXIST: i32 = @intFromEnum(SystemErrno.EEXIST); + pub const FAULT: i32 = @intFromEnum(SystemErrno.EFAULT); + pub const HOSTUNREACH: i32 = @intFromEnum(SystemErrno.EHOSTUNREACH); + pub const INTR: i32 = 
@intFromEnum(SystemErrno.EINTR); + pub const INVAL: i32 = @intFromEnum(SystemErrno.EINVAL); + pub const IO: i32 = @intFromEnum(SystemErrno.EIO); + pub const ISCONN: i32 = @intFromEnum(SystemErrno.EISCONN); + pub const ISDIR: i32 = @intFromEnum(SystemErrno.EISDIR); + pub const LOOP: i32 = @intFromEnum(SystemErrno.ELOOP); + pub const MFILE: i32 = @intFromEnum(SystemErrno.EMFILE); + pub const MSGSIZE: i32 = @intFromEnum(SystemErrno.EMSGSIZE); + pub const NAMETOOLONG: i32 = @intFromEnum(SystemErrno.ENAMETOOLONG); + pub const NETDOWN: i32 = @intFromEnum(SystemErrno.ENETDOWN); + pub const NETUNREACH: i32 = @intFromEnum(SystemErrno.ENETUNREACH); + pub const NFILE: i32 = @intFromEnum(SystemErrno.ENFILE); + pub const NOBUFS: i32 = @intFromEnum(SystemErrno.ENOBUFS); + pub const NODEV: i32 = @intFromEnum(SystemErrno.ENODEV); + pub const NOENT: i32 = @intFromEnum(SystemErrno.ENOENT); + pub const NOMEM: i32 = @intFromEnum(SystemErrno.ENOMEM); + pub const NONET: i32 = @intFromEnum(SystemErrno.ENONET); + pub const NOSPC: i32 = @intFromEnum(SystemErrno.ENOSPC); + pub const NOSYS: i32 = @intFromEnum(SystemErrno.ENOSYS); + pub const NOTCONN: i32 = @intFromEnum(SystemErrno.ENOTCONN); + pub const NOTDIR: i32 = @intFromEnum(SystemErrno.ENOTDIR); + pub const NOTEMPTY: i32 = @intFromEnum(SystemErrno.ENOTEMPTY); + pub const NOTSOCK: i32 = @intFromEnum(SystemErrno.ENOTSOCK); + pub const NOTSUP: i32 = @intFromEnum(SystemErrno.ENOTSUP); + pub const PERM: i32 = @intFromEnum(SystemErrno.EPERM); + pub const PIPE: i32 = @intFromEnum(SystemErrno.EPIPE); + pub const PROTO: i32 = @intFromEnum(SystemErrno.EPROTO); + pub const PROTONOSUPPORT: i32 = @intFromEnum(SystemErrno.EPROTONOSUPPORT); + pub const PROTOTYPE: i32 = @intFromEnum(SystemErrno.EPROTOTYPE); + pub const ROFS: i32 = @intFromEnum(SystemErrno.EROFS); + pub const SHUTDOWN: i32 = @intFromEnum(SystemErrno.ESHUTDOWN); + pub const SPIPE: i32 = @intFromEnum(SystemErrno.ESPIPE); + pub const SRCH: i32 = @intFromEnum(SystemErrno.ESRCH); + pub const TIMEDOUT: i32 = @intFromEnum(SystemErrno.ETIMEDOUT); + pub const TXTBSY: i32 = @intFromEnum(SystemErrno.ETXTBSY); + pub const XDEV: i32 = @intFromEnum(SystemErrno.EXDEV); + pub const FBIG: i32 = @intFromEnum(SystemErrno.EFBIG); + pub const NOPROTOOPT: i32 = @intFromEnum(SystemErrno.ENOPROTOOPT); + pub const RANGE: i32 = @intFromEnum(SystemErrno.ERANGE); + pub const NXIO: i32 = @intFromEnum(SystemErrno.ENXIO); + pub const MLINK: i32 = @intFromEnum(SystemErrno.EMLINK); + pub const HOSTDOWN: i32 = @intFromEnum(SystemErrno.EHOSTDOWN); + pub const REMOTEIO: i32 = @intFromEnum(SystemErrno.EREMOTEIO); + pub const NOTTY: i32 = @intFromEnum(SystemErrno.ENOTTY); + pub const FTYPE: i32 = -bun.windows.libuv.UV_EFTYPE; + pub const ILSEQ: i32 = @intFromEnum(SystemErrno.EILSEQ); + pub const OVERFLOW: i32 = @intFromEnum(SystemErrno.EOVERFLOW); + pub const SOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT); + pub const NODATA: i32 = @intFromEnum(SystemErrno.ENODATA); + pub const UNATCH: i32 = @intFromEnum(SystemErrno.EUNATCH); +}; +pub fn getErrno(rc: anytype) E { + const Type = @TypeOf(rc); + + return switch (Type) { + // raw system calls from std.os.linux.* will return usize + // the errno is stored in this value + usize => { + const signed: isize = @bitCast(rc); + const int = if (signed > -4096 and signed < 0) -signed else 0; + return @enumFromInt(int); + }, + + // glibc system call wrapper returns i32/int + // the errno is stored in a thread local variable + // + // TODO: the inclusion of 'u32' and 'isize' seems suspicious + 
i32, c_int, u32, isize, i64 => if (rc == -1) + @enumFromInt(std.c._errno().*) + else + .SUCCESS, + + else => @compileError("Not implemented yet for type " ++ @typeName(Type)), + }; +} +const std = @import("std"); +const bun = @import("bun"); diff --git a/src/windows_c.zig b/src/errno/windows_errno.zig similarity index 76% rename from src/windows_c.zig rename to src/errno/windows_errno.zig index 64930d5fbb..be989ac9e5 100644 --- a/src/windows_c.zig +++ b/src/errno/windows_errno.zig @@ -1,66 +1,301 @@ -const std = @import("std"); -const bun = @import("bun"); -const builtin = @import("builtin"); -const win32 = std.os.windows; -const posix = std.posix; -const mem = std.mem; -const Stat = std.fs.File.Stat; -const Kind = std.fs.File.Kind; -const StatError = std.fs.File.StatError; +pub const E = enum(u16) { + SUCCESS = 0, + PERM = 1, + NOENT = 2, + SRCH = 3, + INTR = 4, + IO = 5, + NXIO = 6, + @"2BIG" = 7, + NOEXEC = 8, + BADF = 9, + CHILD = 10, + AGAIN = 11, + NOMEM = 12, + ACCES = 13, + FAULT = 14, + NOTBLK = 15, + BUSY = 16, + EXIST = 17, + XDEV = 18, + NODEV = 19, + NOTDIR = 20, + ISDIR = 21, + INVAL = 22, + NFILE = 23, + MFILE = 24, + NOTTY = 25, + TXTBSY = 26, + FBIG = 27, + NOSPC = 28, + SPIPE = 29, + ROFS = 30, + MLINK = 31, + PIPE = 32, + DOM = 33, + RANGE = 34, + DEADLK = 35, + NAMETOOLONG = 36, + NOLCK = 37, + NOSYS = 38, + NOTEMPTY = 39, + LOOP = 40, + WOULDBLOCK = 41, + NOMSG = 42, + IDRM = 43, + CHRNG = 44, + L2NSYNC = 45, + L3HLT = 46, + L3RST = 47, + LNRNG = 48, + UNATCH = 49, + NOCSI = 50, + L2HLT = 51, + BADE = 52, + BADR = 53, + XFULL = 54, + NOANO = 55, + BADRQC = 56, + BADSLT = 57, + DEADLOCK = 58, + BFONT = 59, + NOSTR = 60, + NODATA = 61, + TIME = 62, + NOSR = 63, + NONET = 64, + NOPKG = 65, + REMOTE = 66, + NOLINK = 67, + ADV = 68, + SRMNT = 69, + COMM = 70, + PROTO = 71, + MULTIHOP = 72, + DOTDOT = 73, + BADMSG = 74, + OVERFLOW = 75, + NOTUNIQ = 76, + BADFD = 77, + REMCHG = 78, + LIBACC = 79, + LIBBAD = 80, + LIBSCN = 81, + LIBMAX = 82, + LIBEXEC = 83, + ILSEQ = 84, + RESTART = 85, + STRPIPE = 86, + USERS = 87, + NOTSOCK = 88, + DESTADDRREQ = 89, + MSGSIZE = 90, + PROTOTYPE = 91, + NOPROTOOPT = 92, + PROTONOSUPPORT = 93, + SOCKTNOSUPPORT = 94, + NOTSUP = 95, + PFNOSUPPORT = 96, + AFNOSUPPORT = 97, + ADDRINUSE = 98, + ADDRNOTAVAIL = 99, + NETDOWN = 100, + NETUNREACH = 101, + NETRESET = 102, + CONNABORTED = 103, + CONNRESET = 104, + NOBUFS = 105, + ISCONN = 106, + NOTCONN = 107, + SHUTDOWN = 108, + TOOMANYREFS = 109, + TIMEDOUT = 110, + CONNREFUSED = 111, + HOSTDOWN = 112, + HOSTUNREACH = 113, + ALREADY = 114, + INPROGRESS = 115, + STALE = 116, + UCLEAN = 117, + NOTNAM = 118, + NAVAIL = 119, + ISNAM = 120, + REMOTEIO = 121, + DQUOT = 122, + NOMEDIUM = 123, + MEDIUMTYPE = 124, + CANCELED = 125, + NOKEY = 126, + KEYEXPIRED = 127, + KEYREVOKED = 128, + KEYREJECTED = 129, + OWNERDEAD = 130, + NOTRECOVERABLE = 131, + RFKILL = 132, + HWPOISON = 133, + UNKNOWN = 134, + CHARSET = 135, + EOF = 136, -// Windows doesn't have memmem, so we need to implement it -// this is used in src/string_immutable.zig -pub export fn memmem(haystack: ?[*]const u8, haystacklen: usize, needle: ?[*]const u8, needlelen: usize) ?[*]const u8 { - // Handle null pointers - if (haystack == null or needle == null) return null; - - // Handle empty needle case - if (needlelen == 0) return haystack; - - // Handle case where needle is longer than haystack - if (needlelen > haystacklen) return null; - - const hay = haystack.?[0..haystacklen]; - const nee = needle.?[0..needlelen]; - - const i = std.mem.indexOf(u8, 
hay, nee) orelse return null; - return hay.ptr + i; -} - -pub const lstat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; - break :blk @extern(T, .{ .name = "lstat64" }); + UV_E2BIG = -uv.UV_E2BIG, + UV_EACCES = -uv.UV_EACCES, + UV_EADDRINUSE = -uv.UV_EADDRINUSE, + UV_EADDRNOTAVAIL = -uv.UV_EADDRNOTAVAIL, + UV_EAFNOSUPPORT = -uv.UV_EAFNOSUPPORT, + UV_EAGAIN = -uv.UV_EAGAIN, + UV_EAI_ADDRFAMILY = -uv.UV_EAI_ADDRFAMILY, + UV_EAI_AGAIN = -uv.UV_EAI_AGAIN, + UV_EAI_BADFLAGS = -uv.UV_EAI_BADFLAGS, + UV_EAI_BADHINTS = -uv.UV_EAI_BADHINTS, + UV_EAI_CANCELED = -uv.UV_EAI_CANCELED, + UV_EAI_FAIL = -uv.UV_EAI_FAIL, + UV_EAI_FAMILY = -uv.UV_EAI_FAMILY, + UV_EAI_MEMORY = -uv.UV_EAI_MEMORY, + UV_EAI_NODATA = -uv.UV_EAI_NODATA, + UV_EAI_NONAME = -uv.UV_EAI_NONAME, + UV_EAI_OVERFLOW = -uv.UV_EAI_OVERFLOW, + UV_EAI_PROTOCOL = -uv.UV_EAI_PROTOCOL, + UV_EAI_SERVICE = -uv.UV_EAI_SERVICE, + UV_EAI_SOCKTYPE = -uv.UV_EAI_SOCKTYPE, + UV_EALREADY = -uv.UV_EALREADY, + UV_EBADF = -uv.UV_EBADF, + UV_EBUSY = -uv.UV_EBUSY, + UV_ECANCELED = -uv.UV_ECANCELED, + UV_ECHARSET = -uv.UV_ECHARSET, + UV_ECONNABORTED = -uv.UV_ECONNABORTED, + UV_ECONNREFUSED = -uv.UV_ECONNREFUSED, + UV_ECONNRESET = -uv.UV_ECONNRESET, + UV_EDESTADDRREQ = -uv.UV_EDESTADDRREQ, + UV_EEXIST = -uv.UV_EEXIST, + UV_EFAULT = -uv.UV_EFAULT, + UV_EFBIG = -uv.UV_EFBIG, + UV_EHOSTUNREACH = -uv.UV_EHOSTUNREACH, + UV_EINVAL = -uv.UV_EINVAL, + UV_EINTR = -uv.UV_EINTR, + UV_EISCONN = -uv.UV_EISCONN, + UV_EIO = -uv.UV_EIO, + UV_ELOOP = -uv.UV_ELOOP, + UV_EISDIR = -uv.UV_EISDIR, + UV_EMSGSIZE = -uv.UV_EMSGSIZE, + UV_EMFILE = -uv.UV_EMFILE, + UV_ENETDOWN = -uv.UV_ENETDOWN, + UV_ENAMETOOLONG = -uv.UV_ENAMETOOLONG, + UV_ENFILE = -uv.UV_ENFILE, + UV_ENETUNREACH = -uv.UV_ENETUNREACH, + UV_ENODEV = -uv.UV_ENODEV, + UV_ENOBUFS = -uv.UV_ENOBUFS, + UV_ENOMEM = -uv.UV_ENOMEM, + UV_ENOENT = -uv.UV_ENOENT, + UV_ENOPROTOOPT = -uv.UV_ENOPROTOOPT, + UV_ENONET = -uv.UV_ENONET, + UV_ENOSYS = -uv.UV_ENOSYS, + UV_ENOSPC = -uv.UV_ENOSPC, + UV_ENOTDIR = -uv.UV_ENOTDIR, + UV_ENOTCONN = -uv.UV_ENOTCONN, + UV_ENOTSOCK = -uv.UV_ENOTSOCK, + UV_ENOTEMPTY = -uv.UV_ENOTEMPTY, + UV_EOVERFLOW = -uv.UV_EOVERFLOW, + UV_ENOTSUP = -uv.UV_ENOTSUP, + UV_EPIPE = -uv.UV_EPIPE, + UV_EPERM = -uv.UV_EPERM, + UV_EPROTONOSUPPORT = -uv.UV_EPROTONOSUPPORT, + UV_EPROTO = -uv.UV_EPROTO, + UV_ERANGE = -uv.UV_ERANGE, + UV_EPROTOTYPE = -uv.UV_EPROTOTYPE, + UV_ESHUTDOWN = -uv.UV_ESHUTDOWN, + UV_EROFS = -uv.UV_EROFS, + UV_ESRCH = -uv.UV_ESRCH, + UV_ESPIPE = -uv.UV_ESPIPE, + UV_ETXTBSY = -uv.UV_ETXTBSY, + UV_ETIMEDOUT = -uv.UV_ETIMEDOUT, + UV_UNKNOWN = -uv.UV_UNKNOWN, + UV_EXDEV = -uv.UV_EXDEV, + UV_ENXIO = -uv.UV_ENXIO, + UV_EOF = -uv.UV_EOF, + UV_EHOSTDOWN = -uv.UV_EHOSTDOWN, + UV_EMLINK = -uv.UV_EMLINK, + UV_ENOTTY = -uv.UV_ENOTTY, + UV_EREMOTEIO = -uv.UV_EREMOTEIO, + UV_EILSEQ = -uv.UV_EILSEQ, + UV_EFTYPE = -uv.UV_EFTYPE, + UV_ENODATA = -uv.UV_ENODATA, + UV_ESOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT, + UV_ERRNO_MAX = -uv.UV_ERRNO_MAX, + UV_EUNATCH = -uv.UV_EUNATCH, }; -pub const fstat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; - break :blk @extern(T, .{ .name = "fstat64" }); -}; -pub const stat = blk: { - const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; - break :blk @extern(T, .{ .name = "stat64" }); +pub const S = struct { + pub const IFMT = 0o170000; + + pub const IFDIR = 0o040000; + pub const IFCHR = 0o020000; + pub const IFBLK = 0o060000; + pub const IFREG = 0o100000; + pub const IFIFO = 0o010000; + pub const IFLNK = 
0o120000; + pub const IFSOCK = 0o140000; + + pub const ISUID = 0o4000; + pub const ISGID = 0o2000; + pub const ISVTX = 0o1000; + pub const IRUSR = 0o400; + pub const IWUSR = 0o200; + pub const IXUSR = 0o100; + pub const IRWXU = 0o700; + pub const IRGRP = 0o040; + pub const IWGRP = 0o020; + pub const IXGRP = 0o010; + pub const IRWXG = 0o070; + pub const IROTH = 0o004; + pub const IWOTH = 0o002; + pub const IXOTH = 0o001; + pub const IRWXO = 0o007; + + pub inline fn ISREG(m: i32) bool { + return m & IFMT == IFREG; + } + + pub inline fn ISDIR(m: i32) bool { + return m & IFMT == IFDIR; + } + + pub inline fn ISCHR(m: i32) bool { + return m & IFMT == IFCHR; + } + + pub inline fn ISBLK(m: i32) bool { + return m & IFMT == IFBLK; + } + + pub inline fn ISFIFO(m: i32) bool { + return m & IFMT == IFIFO; + } + + pub inline fn ISLNK(m: i32) bool { + return m & IFMT == IFLNK; + } + + pub inline fn ISSOCK(m: i32) bool { + return m & IFMT == IFSOCK; + } }; -pub fn getTotalMemory() usize { - return uv.uv_get_total_memory(); +pub fn getErrno(rc: anytype) E { + if (comptime @TypeOf(rc) == bun.windows.NTSTATUS) { + return bun.windows.translateNTStatusToErrno(rc); + } + + if (Win32Error.get().toSystemErrno()) |sys| { + return sys.toE(); + } + + if (bun.windows.WSAGetLastError()) |wsa| { + return wsa.toE(); + } + + return .SUCCESS; } -pub fn getFreeMemory() usize { - return uv.uv_get_free_memory(); -} - -pub fn getSystemLoadavg() [3]f32 { - // loadavg is not supported on windows even in node - return .{ 0, 0, 0 }; -} - -pub const Mode = u16; -const Win32Error = bun.windows.Win32Error; - -// The way we do errors in Bun needs to get cleaned up. -// This is way too complicated. -// The problem is because we use libc in some cases and we use zig's std lib in other places and other times we go direct. -// So we end up with a lot of redundant code. 
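// --- Illustrative note (editor sketch, not part of the patch) ----------------
// Each platform file under src/errno/ exposes the same surface — `E`,
// `SystemErrno`, `UV_E`, and `getErrno` — which is what allows the rest of this
// diff to replace `bun.C.getErrno`/`bun.C.E` with `bun.sys.getErrno`/`bun.sys.E`
// without platform-specific call sites. A minimal sketch of the intended usage,
// with a hypothetical helper name:
fn didFail(rc: c_int) ?E {
    // POSIX builds read the thread-local errno when rc == -1; the Windows build
    // consults Win32Error / WSAGetLastError instead and ignores rc.
    const err = getErrno(rc);
    return if (err == .SUCCESS) null else err;
}
// ------------------------------------------------------------------------------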
pub const SystemErrno = enum(u16) { SUCCESS = 0, EPERM = 1, @@ -849,554 +1084,77 @@ pub const SystemErrno = enum(u16) { } }; -pub const UV_E2BIG = -uv.UV_E2BIG; -pub const UV_EACCES = -uv.UV_EACCES; -pub const UV_EADDRINUSE = -uv.UV_EADDRINUSE; -pub const UV_EADDRNOTAVAIL = -uv.UV_EADDRNOTAVAIL; -pub const UV_EAFNOSUPPORT = -uv.UV_EAFNOSUPPORT; -pub const UV_EAGAIN = -uv.UV_EAGAIN; -pub const UV_EALREADY = -uv.UV_EALREADY; -pub const UV_EBADF = -uv.UV_EBADF; -pub const UV_EBUSY = -uv.UV_EBUSY; -pub const UV_ECANCELED = -uv.UV_ECANCELED; -pub const UV_ECHARSET = -uv.UV_ECHARSET; -pub const UV_ECONNABORTED = -uv.UV_ECONNABORTED; -pub const UV_ECONNREFUSED = -uv.UV_ECONNREFUSED; -pub const UV_ECONNRESET = -uv.UV_ECONNRESET; -pub const UV_EDESTADDRREQ = -uv.UV_EDESTADDRREQ; -pub const UV_EEXIST = -uv.UV_EEXIST; -pub const UV_EFAULT = -uv.UV_EFAULT; -pub const UV_EHOSTUNREACH = -uv.UV_EHOSTUNREACH; -pub const UV_EINTR = -uv.UV_EINTR; -pub const UV_EINVAL = -uv.UV_EINVAL; -pub const UV_EIO = -uv.UV_EIO; -pub const UV_EISCONN = -uv.UV_EISCONN; -pub const UV_EISDIR = -uv.UV_EISDIR; -pub const UV_ELOOP = -uv.UV_ELOOP; -pub const UV_EMFILE = -uv.UV_EMFILE; -pub const UV_EMSGSIZE = -uv.UV_EMSGSIZE; -pub const UV_ENAMETOOLONG = -uv.UV_ENAMETOOLONG; -pub const UV_ENETDOWN = -uv.UV_ENETDOWN; -pub const UV_ENETUNREACH = -uv.UV_ENETUNREACH; -pub const UV_ENFILE = -uv.UV_ENFILE; -pub const UV_ENOBUFS = -uv.UV_ENOBUFS; -pub const UV_ENODEV = -uv.UV_ENODEV; -pub const UV_ENOENT = -uv.UV_ENOENT; -pub const UV_ENOMEM = -uv.UV_ENOMEM; -pub const UV_ENONET = -uv.UV_ENONET; -pub const UV_ENOSPC = -uv.UV_ENOSPC; -pub const UV_ENOSYS = -uv.UV_ENOSYS; -pub const UV_ENOTCONN = -uv.UV_ENOTCONN; -pub const UV_ENOTDIR = -uv.UV_ENOTDIR; -pub const UV_ENOTEMPTY = -uv.UV_ENOTEMPTY; -pub const UV_ENOTSOCK = -uv.UV_ENOTSOCK; -pub const UV_ENOTSUP = -uv.UV_ENOTSUP; -pub const UV_EPERM = -uv.UV_EPERM; -pub const UV_EPIPE = -uv.UV_EPIPE; -pub const UV_EPROTO = -uv.UV_EPROTO; -pub const UV_EPROTONOSUPPORT = -uv.UV_EPROTONOSUPPORT; -pub const UV_EPROTOTYPE = -uv.UV_EPROTOTYPE; -pub const UV_EROFS = -uv.UV_EROFS; -pub const UV_ESHUTDOWN = -uv.UV_ESHUTDOWN; -pub const UV_ESPIPE = -uv.UV_ESPIPE; -pub const UV_ESRCH = -uv.UV_ESRCH; -pub const UV_ETIMEDOUT = -uv.UV_ETIMEDOUT; -pub const UV_ETXTBSY = -uv.UV_ETXTBSY; -pub const UV_EXDEV = -uv.UV_EXDEV; -pub const UV_EFBIG = -uv.UV_EFBIG; -pub const UV_ENOPROTOOPT = -uv.UV_ENOPROTOOPT; -pub const UV_ERANGE = -uv.UV_ERANGE; -pub const UV_ENXIO = -uv.UV_ENXIO; -pub const UV_EMLINK = -uv.UV_EMLINK; -pub const UV_EHOSTDOWN = -uv.UV_EHOSTDOWN; -pub const UV_EREMOTEIO = -uv.UV_EREMOTEIO; -pub const UV_ENOTTY = -uv.UV_ENOTTY; -pub const UV_EFTYPE = -uv.UV_EFTYPE; -pub const UV_EILSEQ = -uv.UV_EILSEQ; -pub const UV_EOVERFLOW = -uv.UV_EOVERFLOW; -pub const UV_ESOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT; -pub const UV_ENODATA = -uv.UV_ENODATA; -pub const UV_EUNATCH = -uv.UV_EUNATCH; - -pub const off_t = i64; -pub fn preallocate_file(_: posix.fd_t, _: off_t, _: off_t) !void {} - -const uv = @import("./deps/libuv.zig"); - -pub const E = enum(u16) { - SUCCESS = 0, - PERM = 1, - NOENT = 2, - SRCH = 3, - INTR = 4, - IO = 5, - NXIO = 6, - @"2BIG" = 7, - NOEXEC = 8, - BADF = 9, - CHILD = 10, - AGAIN = 11, - NOMEM = 12, - ACCES = 13, - FAULT = 14, - NOTBLK = 15, - BUSY = 16, - EXIST = 17, - XDEV = 18, - NODEV = 19, - NOTDIR = 20, - ISDIR = 21, - INVAL = 22, - NFILE = 23, - MFILE = 24, - NOTTY = 25, - TXTBSY = 26, - FBIG = 27, - NOSPC = 28, - SPIPE = 29, - ROFS = 30, - MLINK = 31, - PIPE = 32, - DOM = 33, - 
RANGE = 34, - DEADLK = 35, - NAMETOOLONG = 36, - NOLCK = 37, - NOSYS = 38, - NOTEMPTY = 39, - LOOP = 40, - WOULDBLOCK = 41, - NOMSG = 42, - IDRM = 43, - CHRNG = 44, - L2NSYNC = 45, - L3HLT = 46, - L3RST = 47, - LNRNG = 48, - UNATCH = 49, - NOCSI = 50, - L2HLT = 51, - BADE = 52, - BADR = 53, - XFULL = 54, - NOANO = 55, - BADRQC = 56, - BADSLT = 57, - DEADLOCK = 58, - BFONT = 59, - NOSTR = 60, - NODATA = 61, - TIME = 62, - NOSR = 63, - NONET = 64, - NOPKG = 65, - REMOTE = 66, - NOLINK = 67, - ADV = 68, - SRMNT = 69, - COMM = 70, - PROTO = 71, - MULTIHOP = 72, - DOTDOT = 73, - BADMSG = 74, - OVERFLOW = 75, - NOTUNIQ = 76, - BADFD = 77, - REMCHG = 78, - LIBACC = 79, - LIBBAD = 80, - LIBSCN = 81, - LIBMAX = 82, - LIBEXEC = 83, - ILSEQ = 84, - RESTART = 85, - STRPIPE = 86, - USERS = 87, - NOTSOCK = 88, - DESTADDRREQ = 89, - MSGSIZE = 90, - PROTOTYPE = 91, - NOPROTOOPT = 92, - PROTONOSUPPORT = 93, - SOCKTNOSUPPORT = 94, - NOTSUP = 95, - PFNOSUPPORT = 96, - AFNOSUPPORT = 97, - ADDRINUSE = 98, - ADDRNOTAVAIL = 99, - NETDOWN = 100, - NETUNREACH = 101, - NETRESET = 102, - CONNABORTED = 103, - CONNRESET = 104, - NOBUFS = 105, - ISCONN = 106, - NOTCONN = 107, - SHUTDOWN = 108, - TOOMANYREFS = 109, - TIMEDOUT = 110, - CONNREFUSED = 111, - HOSTDOWN = 112, - HOSTUNREACH = 113, - ALREADY = 114, - INPROGRESS = 115, - STALE = 116, - UCLEAN = 117, - NOTNAM = 118, - NAVAIL = 119, - ISNAM = 120, - REMOTEIO = 121, - DQUOT = 122, - NOMEDIUM = 123, - MEDIUMTYPE = 124, - CANCELED = 125, - NOKEY = 126, - KEYEXPIRED = 127, - KEYREVOKED = 128, - KEYREJECTED = 129, - OWNERDEAD = 130, - NOTRECOVERABLE = 131, - RFKILL = 132, - HWPOISON = 133, - UNKNOWN = 134, - CHARSET = 135, - EOF = 136, - - UV_E2BIG = -uv.UV_E2BIG, - UV_EACCES = -uv.UV_EACCES, - UV_EADDRINUSE = -uv.UV_EADDRINUSE, - UV_EADDRNOTAVAIL = -uv.UV_EADDRNOTAVAIL, - UV_EAFNOSUPPORT = -uv.UV_EAFNOSUPPORT, - UV_EAGAIN = -uv.UV_EAGAIN, - UV_EAI_ADDRFAMILY = -uv.UV_EAI_ADDRFAMILY, - UV_EAI_AGAIN = -uv.UV_EAI_AGAIN, - UV_EAI_BADFLAGS = -uv.UV_EAI_BADFLAGS, - UV_EAI_BADHINTS = -uv.UV_EAI_BADHINTS, - UV_EAI_CANCELED = -uv.UV_EAI_CANCELED, - UV_EAI_FAIL = -uv.UV_EAI_FAIL, - UV_EAI_FAMILY = -uv.UV_EAI_FAMILY, - UV_EAI_MEMORY = -uv.UV_EAI_MEMORY, - UV_EAI_NODATA = -uv.UV_EAI_NODATA, - UV_EAI_NONAME = -uv.UV_EAI_NONAME, - UV_EAI_OVERFLOW = -uv.UV_EAI_OVERFLOW, - UV_EAI_PROTOCOL = -uv.UV_EAI_PROTOCOL, - UV_EAI_SERVICE = -uv.UV_EAI_SERVICE, - UV_EAI_SOCKTYPE = -uv.UV_EAI_SOCKTYPE, - UV_EALREADY = -uv.UV_EALREADY, - UV_EBADF = -uv.UV_EBADF, - UV_EBUSY = -uv.UV_EBUSY, - UV_ECANCELED = -uv.UV_ECANCELED, - UV_ECHARSET = -uv.UV_ECHARSET, - UV_ECONNABORTED = -uv.UV_ECONNABORTED, - UV_ECONNREFUSED = -uv.UV_ECONNREFUSED, - UV_ECONNRESET = -uv.UV_ECONNRESET, - UV_EDESTADDRREQ = -uv.UV_EDESTADDRREQ, - UV_EEXIST = -uv.UV_EEXIST, - UV_EFAULT = -uv.UV_EFAULT, - UV_EFBIG = -uv.UV_EFBIG, - UV_EHOSTUNREACH = -uv.UV_EHOSTUNREACH, - UV_EINVAL = -uv.UV_EINVAL, - UV_EINTR = -uv.UV_EINTR, - UV_EISCONN = -uv.UV_EISCONN, - UV_EIO = -uv.UV_EIO, - UV_ELOOP = -uv.UV_ELOOP, - UV_EISDIR = -uv.UV_EISDIR, - UV_EMSGSIZE = -uv.UV_EMSGSIZE, - UV_EMFILE = -uv.UV_EMFILE, - UV_ENETDOWN = -uv.UV_ENETDOWN, - UV_ENAMETOOLONG = -uv.UV_ENAMETOOLONG, - UV_ENFILE = -uv.UV_ENFILE, - UV_ENETUNREACH = -uv.UV_ENETUNREACH, - UV_ENODEV = -uv.UV_ENODEV, - UV_ENOBUFS = -uv.UV_ENOBUFS, - UV_ENOMEM = -uv.UV_ENOMEM, - UV_ENOENT = -uv.UV_ENOENT, - UV_ENOPROTOOPT = -uv.UV_ENOPROTOOPT, - UV_ENONET = -uv.UV_ENONET, - UV_ENOSYS = -uv.UV_ENOSYS, - UV_ENOSPC = -uv.UV_ENOSPC, - UV_ENOTDIR = -uv.UV_ENOTDIR, - UV_ENOTCONN = 
-uv.UV_ENOTCONN, - UV_ENOTSOCK = -uv.UV_ENOTSOCK, - UV_ENOTEMPTY = -uv.UV_ENOTEMPTY, - UV_EOVERFLOW = -uv.UV_EOVERFLOW, - UV_ENOTSUP = -uv.UV_ENOTSUP, - UV_EPIPE = -uv.UV_EPIPE, - UV_EPERM = -uv.UV_EPERM, - UV_EPROTONOSUPPORT = -uv.UV_EPROTONOSUPPORT, - UV_EPROTO = -uv.UV_EPROTO, - UV_ERANGE = -uv.UV_ERANGE, - UV_EPROTOTYPE = -uv.UV_EPROTOTYPE, - UV_ESHUTDOWN = -uv.UV_ESHUTDOWN, - UV_EROFS = -uv.UV_EROFS, - UV_ESRCH = -uv.UV_ESRCH, - UV_ESPIPE = -uv.UV_ESPIPE, - UV_ETXTBSY = -uv.UV_ETXTBSY, - UV_ETIMEDOUT = -uv.UV_ETIMEDOUT, - UV_UNKNOWN = -uv.UV_UNKNOWN, - UV_EXDEV = -uv.UV_EXDEV, - UV_ENXIO = -uv.UV_ENXIO, - UV_EOF = -uv.UV_EOF, - UV_EHOSTDOWN = -uv.UV_EHOSTDOWN, - UV_EMLINK = -uv.UV_EMLINK, - UV_ENOTTY = -uv.UV_ENOTTY, - UV_EREMOTEIO = -uv.UV_EREMOTEIO, - UV_EILSEQ = -uv.UV_EILSEQ, - UV_EFTYPE = -uv.UV_EFTYPE, - UV_ENODATA = -uv.UV_ENODATA, - UV_ESOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT, - UV_ERRNO_MAX = -uv.UV_ERRNO_MAX, - UV_EUNATCH = -uv.UV_EUNATCH, +pub const UV_E = struct { + pub const @"2BIG" = -uv.UV_E2BIG; + pub const ACCES = -uv.UV_EACCES; + pub const ADDRINUSE = -uv.UV_EADDRINUSE; + pub const ADDRNOTAVAIL = -uv.UV_EADDRNOTAVAIL; + pub const AFNOSUPPORT = -uv.UV_EAFNOSUPPORT; + pub const AGAIN = -uv.UV_EAGAIN; + pub const ALREADY = -uv.UV_EALREADY; + pub const BADF = -uv.UV_EBADF; + pub const BUSY = -uv.UV_EBUSY; + pub const CANCELED = -uv.UV_ECANCELED; + pub const CHARSET = -uv.UV_ECHARSET; + pub const CONNABORTED = -uv.UV_ECONNABORTED; + pub const CONNREFUSED = -uv.UV_ECONNREFUSED; + pub const CONNRESET = -uv.UV_ECONNRESET; + pub const DESTADDRREQ = -uv.UV_EDESTADDRREQ; + pub const EXIST = -uv.UV_EEXIST; + pub const FAULT = -uv.UV_EFAULT; + pub const HOSTUNREACH = -uv.UV_EHOSTUNREACH; + pub const INTR = -uv.UV_EINTR; + pub const INVAL = -uv.UV_EINVAL; + pub const IO = -uv.UV_EIO; + pub const ISCONN = -uv.UV_EISCONN; + pub const ISDIR = -uv.UV_EISDIR; + pub const LOOP = -uv.UV_ELOOP; + pub const MFILE = -uv.UV_EMFILE; + pub const MSGSIZE = -uv.UV_EMSGSIZE; + pub const NAMETOOLONG = -uv.UV_ENAMETOOLONG; + pub const NETDOWN = -uv.UV_ENETDOWN; + pub const NETUNREACH = -uv.UV_ENETUNREACH; + pub const NFILE = -uv.UV_ENFILE; + pub const NOBUFS = -uv.UV_ENOBUFS; + pub const NODEV = -uv.UV_ENODEV; + pub const NOENT = -uv.UV_ENOENT; + pub const NOMEM = -uv.UV_ENOMEM; + pub const NONET = -uv.UV_ENONET; + pub const NOSPC = -uv.UV_ENOSPC; + pub const NOSYS = -uv.UV_ENOSYS; + pub const NOTCONN = -uv.UV_ENOTCONN; + pub const NOTDIR = -uv.UV_ENOTDIR; + pub const NOTEMPTY = -uv.UV_ENOTEMPTY; + pub const NOTSOCK = -uv.UV_ENOTSOCK; + pub const NOTSUP = -uv.UV_ENOTSUP; + pub const PERM = -uv.UV_EPERM; + pub const PIPE = -uv.UV_EPIPE; + pub const PROTO = -uv.UV_EPROTO; + pub const PROTONOSUPPORT = -uv.UV_EPROTONOSUPPORT; + pub const PROTOTYPE = -uv.UV_EPROTOTYPE; + pub const ROFS = -uv.UV_EROFS; + pub const SHUTDOWN = -uv.UV_ESHUTDOWN; + pub const SPIPE = -uv.UV_ESPIPE; + pub const SRCH = -uv.UV_ESRCH; + pub const TIMEDOUT = -uv.UV_ETIMEDOUT; + pub const TXTBSY = -uv.UV_ETXTBSY; + pub const XDEV = -uv.UV_EXDEV; + pub const FBIG = -uv.UV_EFBIG; + pub const NOPROTOOPT = -uv.UV_ENOPROTOOPT; + pub const RANGE = -uv.UV_ERANGE; + pub const NXIO = -uv.UV_ENXIO; + pub const MLINK = -uv.UV_EMLINK; + pub const HOSTDOWN = -uv.UV_EHOSTDOWN; + pub const REMOTEIO = -uv.UV_EREMOTEIO; + pub const NOTTY = -uv.UV_ENOTTY; + pub const FTYPE = -uv.UV_EFTYPE; + pub const ILSEQ = -uv.UV_EILSEQ; + pub const OVERFLOW = -uv.UV_EOVERFLOW; + pub const SOCKTNOSUPPORT = -uv.UV_ESOCKTNOSUPPORT; + pub const NODATA = 
-uv.UV_ENODATA; + pub const UNATCH = -uv.UV_EUNATCH; }; - -pub const S = struct { - pub const IFMT = 0o170000; - - pub const IFDIR = 0o040000; - pub const IFCHR = 0o020000; - pub const IFBLK = 0o060000; - pub const IFREG = 0o100000; - pub const IFIFO = 0o010000; - pub const IFLNK = 0o120000; - pub const IFSOCK = 0o140000; - - pub const ISUID = 0o4000; - pub const ISGID = 0o2000; - pub const ISVTX = 0o1000; - pub const IRUSR = 0o400; - pub const IWUSR = 0o200; - pub const IXUSR = 0o100; - pub const IRWXU = 0o700; - pub const IRGRP = 0o040; - pub const IWGRP = 0o020; - pub const IXGRP = 0o010; - pub const IRWXG = 0o070; - pub const IROTH = 0o004; - pub const IWOTH = 0o002; - pub const IXOTH = 0o001; - pub const IRWXO = 0o007; - - pub inline fn ISREG(m: i32) bool { - return m & IFMT == IFREG; - } - - pub inline fn ISDIR(m: i32) bool { - return m & IFMT == IFDIR; - } - - pub inline fn ISCHR(m: i32) bool { - return m & IFMT == IFCHR; - } - - pub inline fn ISBLK(m: i32) bool { - return m & IFMT == IFBLK; - } - - pub inline fn ISFIFO(m: i32) bool { - return m & IFMT == IFIFO; - } - - pub inline fn ISLNK(m: i32) bool { - return m & IFMT == IFLNK; - } - - pub inline fn ISSOCK(m: i32) bool { - return m & IFMT == IFSOCK; - } -}; - -pub fn getErrno(_: anytype) E { - if (Win32Error.get().toSystemErrno()) |sys| { - return sys.toE(); - } - - if (bun.windows.WSAGetLastError()) |wsa| { - return wsa.toE(); - } - return .SUCCESS; -} - -const Maybe = bun.JSC.Maybe; - -const w = std.os.windows; - -extern "c" fn _umask(Mode) Mode; -pub const umask = _umask; - -/// Derived from std.os.windows.renameAtW -/// Allows more errors -pub fn renameAtW( - old_dir_fd: bun.FileDescriptor, - old_path_w: []const u16, - new_dir_fd: bun.FileDescriptor, - new_path_w: []const u16, - replace_if_exists: bool, -) Maybe(void) { - const src_fd = brk: { - switch (bun.sys.openFileAtWindows( - old_dir_fd, - old_path_w, - .{ - .access_mask = w.SYNCHRONIZE | w.GENERIC_WRITE | w.DELETE | w.FILE_TRAVERSE, - .disposition = w.FILE_OPEN, - .options = w.FILE_SYNCHRONOUS_IO_NONALERT | w.FILE_OPEN_REPARSE_POINT, - }, - )) { - .err => { - // retry, wtihout FILE_TRAVERSE flag - switch (bun.sys.openFileAtWindows( - old_dir_fd, - old_path_w, - .{ - .access_mask = w.SYNCHRONIZE | w.GENERIC_WRITE | w.DELETE, - .disposition = w.FILE_OPEN, - .options = w.FILE_SYNCHRONOUS_IO_NONALERT | w.FILE_OPEN_REPARSE_POINT, - }, - )) { - .err => |err2| return .{ .err = err2 }, - .result => |fd| break :brk fd, - } - }, - .result => |fd| break :brk fd, - } - }; - defer src_fd.close(); - - return moveOpenedFileAt(src_fd, new_dir_fd, new_path_w, replace_if_exists); -} - -const log = bun.sys.syslog; - -/// With an open file source_fd, move it into the directory new_dir_fd with the name new_path_w. -/// Does not close the file descriptor. -/// -/// For this to succeed -/// - source_fd must have been opened with access_mask=w.DELETE -/// - new_path_w must be the name of a file. it cannot be a path relative to new_dir_fd. see moveOpenedFileAtLoose -pub fn moveOpenedFileAt( - src_fd: bun.FileDescriptor, - new_dir_fd: bun.FileDescriptor, - new_file_name: []const u16, - replace_if_exists: bool, -) Maybe(void) { - // FILE_RENAME_INFORMATION_EX and FILE_RENAME_POSIX_SEMANTICS require >= win10_rs1, - // but FILE_RENAME_IGNORE_READONLY_ATTRIBUTE requires >= win10_rs5. 
We check >= rs5 here - // so that we only use POSIX_SEMANTICS when we know IGNORE_READONLY_ATTRIBUTE will also be - // supported in order to avoid either (1) using a redundant call that we can know in advance will return - // STATUS_NOT_SUPPORTED or (2) only setting IGNORE_READONLY_ATTRIBUTE when >= rs5 - // and therefore having different behavior when the Windows version is >= rs1 but < rs5. - comptime bun.assert(builtin.target.os.version_range.windows.min.isAtLeast(.win10_rs5)); - - if (bun.Environment.allow_assert) { - bun.assert(std.mem.indexOfScalar(u16, new_file_name, '/') == null); // Call moveOpenedFileAtLoose - } - - const struct_buf_len = @sizeOf(w.FILE_RENAME_INFORMATION_EX) + (bun.MAX_PATH_BYTES - 1); - var rename_info_buf: [struct_buf_len]u8 align(@alignOf(w.FILE_RENAME_INFORMATION_EX)) = undefined; - - const struct_len = @sizeOf(w.FILE_RENAME_INFORMATION_EX) - 1 + new_file_name.len * 2; - if (struct_len > struct_buf_len) return Maybe(void).errno(bun.C.E.NAMETOOLONG, .NtSetInformationFile); - - const rename_info = @as(*w.FILE_RENAME_INFORMATION_EX, @ptrCast(&rename_info_buf)); - var io_status_block: w.IO_STATUS_BLOCK = undefined; - - var flags: w.ULONG = w.FILE_RENAME_POSIX_SEMANTICS | w.FILE_RENAME_IGNORE_READONLY_ATTRIBUTE; - if (replace_if_exists) flags |= w.FILE_RENAME_REPLACE_IF_EXISTS; - rename_info.* = .{ - .Flags = flags, - .RootDirectory = if (std.fs.path.isAbsoluteWindowsWTF16(new_file_name)) null else new_dir_fd.cast(), - .FileNameLength = @intCast(new_file_name.len * 2), // already checked error.NameTooLong - .FileName = undefined, - }; - @memcpy(@as([*]u16, &rename_info.FileName)[0..new_file_name.len], new_file_name); - const rc = w.ntdll.NtSetInformationFile( - src_fd.cast(), - &io_status_block, - rename_info, - @intCast(struct_len), // already checked for error.NameTooLong - .FileRenameInformationEx, - ); - log("moveOpenedFileAt({} ->> {} '{}', {s}) = {s}", .{ src_fd, new_dir_fd, bun.fmt.utf16(new_file_name), if (replace_if_exists) "replace_if_exists" else "no flag", @tagName(rc) }); - - if (bun.Environment.isDebug) { - if (rc == .ACCESS_DENIED) { - bun.Output.debugWarn("moveOpenedFileAt was called on a file descriptor without access_mask=w.DELETE", .{}); - } - } - - return if (rc == .SUCCESS) - Maybe(void).success - else - Maybe(void).errno(rc, .NtSetInformationFile); -} - -/// Same as moveOpenedFileAt but allows new_path to be a path relative to new_dir_fd. -/// -/// Aka: moveOpenedFileAtLoose(fd, dir, ".\\a\\relative\\not-normalized-path.txt", false); -pub fn moveOpenedFileAtLoose( - src_fd: bun.FileDescriptor, - new_dir_fd: bun.FileDescriptor, - new_path: []const u16, - replace_if_exists: bool, -) Maybe(void) { - bun.assert(std.mem.indexOfScalar(u16, new_path, '/') == null); // Call bun.strings.toWPathNormalized first - - const without_leading_dot_slash = if (new_path.len >= 2 and new_path[0] == '.' and new_path[1] == '\\') - new_path[2..] 
- else - new_path; - - if (std.mem.lastIndexOfScalar(u16, new_path, '\\')) |last_slash| { - const dirname = new_path[0..last_slash]; - const fd = switch (bun.sys.openDirAtWindows(new_dir_fd, dirname, .{ .can_rename_or_delete = true, .iterable = false })) { - .err => |e| return .{ .err = e }, - .result => |fd| fd, - }; - defer fd.close(); - - const basename = new_path[last_slash + 1 ..]; - return moveOpenedFileAt(src_fd, fd, basename, replace_if_exists); - } - - // easy mode - return moveOpenedFileAt(src_fd, new_dir_fd, without_leading_dot_slash, replace_if_exists); -} - -const FILE_DISPOSITION_DO_NOT_DELETE: w.ULONG = 0x00000000; -const FILE_DISPOSITION_DELETE: w.ULONG = 0x00000001; -const FILE_DISPOSITION_POSIX_SEMANTICS: w.ULONG = 0x00000002; -const FILE_DISPOSITION_FORCE_IMAGE_SECTION_CHECK: w.ULONG = 0x00000004; -const FILE_DISPOSITION_ON_CLOSE: w.ULONG = 0x00000008; -const FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE: w.ULONG = 0x00000010; - -/// Extracted from standard library except this takes an open file descriptor -/// -/// NOTE: THE FILE MUST BE OPENED WITH ACCESS_MASK "DELETE" OR THIS WILL FAIL -pub fn deleteOpenedFile(fd: bun.FileDescriptor) Maybe(void) { - comptime bun.assert(builtin.target.os.version_range.windows.min.isAtLeast(.win10_rs5)); - var info = w.FILE_DISPOSITION_INFORMATION_EX{ - .Flags = FILE_DISPOSITION_DELETE | - FILE_DISPOSITION_POSIX_SEMANTICS | - FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE, - }; - - var io: w.IO_STATUS_BLOCK = undefined; - const rc = w.ntdll.NtSetInformationFile( - fd.cast(), - &io, - &info, - @sizeOf(w.FILE_DISPOSITION_INFORMATION_EX), - .FileDispositionInformationEx, - ); - - log("deleteOpenedFile({}) = {s}", .{ fd, @tagName(rc) }); - - return if (rc == .SUCCESS) - Maybe(void).success - else - Maybe(void).errno(rc, .NtSetInformationFile); -} - -pub extern fn windows_enable_stdio_inheritance() void; - -pub extern "c" fn quick_exit(code: c_int) noreturn; +const std = @import("std"); +const bun = @import("bun"); +const uv = bun.windows.libuv; +const Win32Error = bun.windows.Win32Error; diff --git a/src/fd.zig b/src/fd.zig index 48630703f4..5d5e7e07bd 100644 --- a/src/fd.zig +++ b/src/fd.zig @@ -203,7 +203,7 @@ pub const FD = packed struct(backing_int) { maybe_windows_fd.close(); } return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.MFILE), + .errno = @intFromEnum(bun.sys.E.MFILE), .syscall = syscall_tag, } }; }, @@ -252,14 +252,14 @@ pub const FD = packed struct(backing_int) { const result: ?bun.sys.Error = switch (os) { .linux => result: { bun.assert(fd.native() >= 0); - break :result switch (bun.C.getErrno(bun.sys.syscall.close(fd.native()))) { + break :result switch (bun.sys.getErrno(bun.sys.syscall.close(fd.native()))) { .BADF => .{ .errno = @intFromEnum(E.BADF), .syscall = .close, .fd = fd }, else => null, }; }, .mac => result: { bun.assert(fd.native() >= 0); - break :result switch (bun.C.getErrno(bun.sys.syscall.@"close$NOCANCEL"(fd.native()))) { + break :result switch (bun.sys.getErrno(bun.sys.syscall.@"close$NOCANCEL"(fd.native()))) { .BADF => .{ .errno = @intFromEnum(E.BADF), .syscall = .close, .fd = fd }, else => null, }; diff --git a/src/fs.zig b/src/fs.zig index 35d3c707bb..6bb4f9b978 100644 --- a/src/fs.zig +++ b/src/fs.zig @@ -11,7 +11,6 @@ const FileDescriptor = bun.FileDescriptor; const FeatureFlags = bun.FeatureFlags; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; const sync = @import("sync.zig"); const Mutex = bun.Mutex; const Semaphore = sync.Semaphore; @@ -694,7 +693,7 @@ pub 
const FileSystem = struct { bun.assert(this.fd != bun.invalid_fd); bun.assert(this.dir_fd != bun.invalid_fd); - try C.moveFileZWithHandle(this.fd, this.dir_fd, bun.sliceTo(from_name, 0), bun.FD.cwd(), bun.sliceTo(name, 0)); + try bun.sys.moveFileZWithHandle(this.fd, this.dir_fd, bun.sliceTo(from_name, 0), bun.FD.cwd(), bun.sliceTo(name, 0)); this.close(); } @@ -1330,7 +1329,7 @@ pub const FileSystem = struct { ) !Entry.Cache { var outpath: bun.PathBuffer = undefined; - const stat = try C.lstat_absolute(absolute_path); + const stat = try bun.sys.lstat_absolute(absolute_path); const is_symlink = stat.kind == std.fs.File.Kind.SymLink; var _kind = stat.kind; var cache = Entry.Cache{ @@ -1439,7 +1438,7 @@ pub const FileSystem = struct { return cache; } - const stat = try C.lstat_absolute(absolute_path_c); + const stat = try bun.sys.lstat_absolute(absolute_path_c); const is_symlink = stat.kind == std.fs.File.Kind.sym_link; var file_kind = stat.kind; diff --git a/src/glob/GlobWalker.zig b/src/glob/GlobWalker.zig index 39c08beefb..30930be508 100644 --- a/src/glob/GlobWalker.zig +++ b/src/glob/GlobWalker.zig @@ -35,7 +35,6 @@ const Arena = std.heap.ArenaAllocator; const ArrayList = std.ArrayListUnmanaged; const ArrayListManaged = std.ArrayList; const BunString = bun.String; -const C = @import("../c.zig"); const CodepointIterator = @import("../string_immutable.zig").UnsignedCodepointIterator; const Codepoint = CodepointIterator.Cursor.CodePointType; const Dirent = @import("../bun.js/node/types.zig").Dirent; @@ -48,7 +47,7 @@ const PathLike = @import("../bun.js/node/types.zig").PathLike; const PathString = @import("../string_types.zig").PathString; const ResolvePath = @import("../resolver/resolve_path.zig"); const Syscall = bun.sys; -const ZigString = @import("../bun.js/bindings/bindings.zig").ZigString; +const ZigString = bun.JSC.ZigString; // const Codepoint = u32; const Cursor = CodepointIterator.Cursor; @@ -290,7 +289,7 @@ pub const DirEntryAccessor = struct { } // TODO do we want to propagate ENOTDIR through the 'Maybe' to match the SyscallAccessor? 
// The glob implementation specifically checks for this error when dealing with symlinks - // return .{ .err = Syscall.Error.fromCode(bun.C.E.NOTDIR, Syscall.Tag.open) }; + // return .{ .err = Syscall.Error.fromCode(bun.sys.E.NOTDIR, Syscall.Tag.open) }; const res = FS.instance.fs.readDirectory(path, null, 0, false) catch |err| { return err; }; @@ -472,12 +471,12 @@ pub fn GlobWalker_( const path = try this.walker.arena.allocator().dupeZ(u8, path_without_special_syntax); const fd = switch (try Accessor.open(path)) { .err => |e| { - if (e.getErrno() == bun.C.E.NOTDIR) { + if (e.getErrno() == bun.sys.E.NOTDIR) { this.iter_state = .{ .matched = path }; return Maybe(void).success; } // Doesn't exist - if (e.getErrno() == bun.C.E.NOENT) { + if (e.getErrno() == bun.sys.E.NOENT) { this.iter_state = .get_next; return Maybe(void).success; } @@ -666,7 +665,7 @@ pub fn GlobWalker_( const stat_result: bun.Stat = switch (Accessor.statat(fd, pathz)) { .err => |e_| { var e: bun.sys.Error = e_; - if (e.getErrno() == bun.C.E.NOENT) { + if (e.getErrno() == .NOENT) { this.iter_state = .get_next; return Maybe(void).success; } @@ -740,7 +739,7 @@ pub fn GlobWalker_( this.iter_state = .get_next; const maybe_dir_fd: ?Accessor.Handle = switch (try Accessor.openat(this.cwd_fd, symlink_full_path_z)) { .err => |err| brk: { - if (@as(usize, @intCast(err.errno)) == @as(usize, @intFromEnum(bun.C.E.NOTDIR))) { + if (@as(usize, @intCast(err.errno)) == @as(usize, @intFromEnum(bun.sys.E.NOTDIR))) { break :brk null; } if (this.walker.error_on_broken_symlinks) return .{ .err = this.walker.handleSysErrWithPath(err, symlink_full_path_z) }; diff --git a/src/http.zig b/src/http.zig index 8d6a0f0d3d..87f5d45929 100644 --- a/src/http.zig +++ b/src/http.zig @@ -9,7 +9,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const FeatureFlags = bun.FeatureFlags; const stringZ = bun.stringZ; -const C = bun.C; + const Loc = bun.logger.Loc; const Log = bun.logger.Log; const DotEnv = @import("./env_loader.zig"); @@ -34,7 +34,8 @@ const Progress = bun.Progress; const X509 = @import("./bun.js/api/bun/x509.zig"); const SSLConfig = @import("./bun.js/api/server.zig").ServerConfig.SSLConfig; const SSLWrapper = @import("./bun.js/api/bun/ssl_wrapper.zig").SSLWrapper; - +const Blob = bun.webcore.Blob; +const FetchHeaders = bun.webcore.FetchHeaders; const URLBufferPool = ObjectPool([8192]u8, null, false, 10); const uws = bun.uws; pub const MimeType = @import("./http/mime_type.zig"); @@ -177,7 +178,7 @@ pub const Sendfile = struct { std.os.linux.sendfile(socket.fd().cast(), this.fd.cast(), &signed_offset, this.remain); this.offset = @as(u64, @intCast(signed_offset)); - const errcode = bun.C.getErrno(val); + const errcode = bun.sys.getErrno(val); this.remain -|= @as(u64, @intCast(this.offset -| begin)); @@ -191,7 +192,7 @@ pub const Sendfile = struct { } else if (Environment.isPosix) { var sbytes: std.posix.off_t = adjusted_count; const signed_offset = @as(i64, @bitCast(@as(u64, this.offset))); - const errcode = bun.C.getErrno(std.c.sendfile( + const errcode = bun.sys.getErrno(std.c.sendfile( this.fd.cast(), socket.fd().cast(), signed_offset, @@ -1779,8 +1780,6 @@ pub inline fn cleanup(force: bool) void { default_arena.gc(force); } -pub const Headers = JSC.WebCore.Headers; - pub const SOCKET_FLAGS: u32 = if (Environment.isLinux) SOCK.CLOEXEC | posix.MSG.NOSIGNAL else @@ -4674,3 +4673,167 @@ const ThreadlocalAsyncHTTP = struct { async_http: AsyncHTTP, }; + +pub const Headers = struct { + pub const Entry = struct { + name: 
Api.StringPointer, + value: Api.StringPointer, + + pub const List = bun.MultiArrayList(Entry); + }; + + entries: Entry.List = .{}, + buf: std.ArrayListUnmanaged(u8) = .{}, + allocator: std.mem.Allocator, + + pub fn memoryCost(this: *const Headers) usize { + return this.buf.items.len + this.entries.memoryCost(); + } + + pub fn clone(this: *Headers) !Headers { + return Headers{ + .entries = try this.entries.clone(this.allocator), + .buf = try this.buf.clone(this.allocator), + .allocator = this.allocator, + }; + } + + pub fn append(this: *Headers, name: []const u8, value: []const u8) !void { + var offset: u32 = @truncate(this.buf.items.len); + try this.buf.ensureUnusedCapacity(this.allocator, name.len + value.len); + const name_ptr = Api.StringPointer{ + .offset = offset, + .length = @truncate(name.len), + }; + this.buf.appendSliceAssumeCapacity(name); + offset = @truncate(this.buf.items.len); + this.buf.appendSliceAssumeCapacity(value); + + const value_ptr = Api.StringPointer{ + .offset = offset, + .length = @truncate(value.len), + }; + try this.entries.append(this.allocator, .{ + .name = name_ptr, + .value = value_ptr, + }); + } + + pub fn deinit(this: *Headers) void { + this.entries.deinit(this.allocator); + this.buf.clearAndFree(this.allocator); + } + pub fn getContentType(this: *const Headers) ?[]const u8 { + if (this.entries.len == 0 or this.buf.items.len == 0) { + return null; + } + const header_entries = this.entries.slice(); + const header_names = header_entries.items(.name); + const header_values = header_entries.items(.value); + + for (header_names, 0..header_names.len) |name, i| { + if (bun.strings.eqlCaseInsensitiveASCII(this.asStr(name), "content-type", true)) { + return this.asStr(header_values[i]); + } + } + return null; + } + pub fn asStr(this: *const Headers, ptr: Api.StringPointer) []const u8 { + return if (ptr.offset + ptr.length <= this.buf.items.len) + this.buf.items[ptr.offset..][0..ptr.length] + else + ""; + } + + pub const Options = struct { + body: ?*const Blob.Any = null, + }; + + pub fn fromPicoHttpHeaders(headers: []const picohttp.Header, allocator: std.mem.Allocator) !Headers { + const header_count = headers.len; + var result = Headers{ + .entries = .{}, + .buf = .{}, + .allocator = allocator, + }; + + var buf_len: usize = 0; + for (headers) |header| { + buf_len += header.name.len + header.value.len; + } + result.entries.ensureTotalCapacity(allocator, header_count) catch bun.outOfMemory(); + result.entries.len = headers.len; + result.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch bun.outOfMemory(); + result.buf.items.len = buf_len; + var offset: u32 = 0; + for (headers, 0..headers.len) |header, i| { + const name_offset = offset; + bun.copy(u8, result.buf.items[offset..][0..header.name.len], header.name); + offset += @truncate(header.name.len); + const value_offset = offset; + bun.copy(u8, result.buf.items[offset..][0..header.value.len], header.value); + offset += @truncate(header.value.len); + + result.entries.set(i, .{ + .name = .{ + .offset = name_offset, + .length = @truncate(header.name.len), + }, + .value = .{ + .offset = value_offset, + .length = @truncate(header.value.len), + }, + }); + } + return result; + } + + pub fn from(fetch_headers_ref: ?*FetchHeaders, allocator: std.mem.Allocator, options: Options) !Headers { + var header_count: u32 = 0; + var buf_len: u32 = 0; + if (fetch_headers_ref) |headers_ref| + headers_ref.count(&header_count, &buf_len); + var headers = Headers{ + .entries = .{}, + .buf = .{}, + .allocator = allocator, + }; + const 
buf_len_before_content_type = buf_len; + const needs_content_type = brk: { + if (options.body) |body| { + if (body.hasContentTypeFromUser() and (fetch_headers_ref == null or !fetch_headers_ref.?.fastHas(.ContentType))) { + header_count += 1; + buf_len += @as(u32, @truncate(body.contentType().len + "Content-Type".len)); + break :brk true; + } + } + break :brk false; + }; + headers.entries.ensureTotalCapacity(allocator, header_count) catch bun.outOfMemory(); + headers.entries.len = header_count; + headers.buf.ensureTotalCapacityPrecise(allocator, buf_len) catch bun.outOfMemory(); + headers.buf.items.len = buf_len; + var sliced = headers.entries.slice(); + var names = sliced.items(.name); + var values = sliced.items(.value); + if (fetch_headers_ref) |headers_ref| + headers_ref.copyTo(names.ptr, values.ptr, headers.buf.items.ptr); + + // TODO: maybe we should send Content-Type header first instead of last? + if (needs_content_type) { + bun.copy(u8, headers.buf.items[buf_len_before_content_type..], "Content-Type"); + names[header_count - 1] = .{ + .offset = buf_len_before_content_type, + .length = "Content-Type".len, + }; + + bun.copy(u8, headers.buf.items[buf_len_before_content_type + "Content-Type".len ..], options.body.?.contentType()); + values[header_count - 1] = .{ + .offset = buf_len_before_content_type + @as(u32, "Content-Type".len), + .length = @as(u32, @truncate(options.body.?.contentType().len)), + }; + } + + return headers; + } +}; diff --git a/src/http/header_builder.zig b/src/http/header_builder.zig index eb365bb8e8..9a775eefa3 100644 --- a/src/http/header_builder.zig +++ b/src/http/header_builder.zig @@ -1,6 +1,6 @@ const HeaderBuilder = @This(); const StringBuilder = bun.StringBuilder; -const Headers = bun.JSC.WebCore.Headers; +const Headers = bun.http.Headers; const string = bun.string; const HTTPClient = @import("../http.zig"); const Api = @import("../api/schema.zig").Api; diff --git a/src/http/method.zig b/src/http/method.zig index e6391c3242..b28768f3ac 100644 --- a/src/http/method.zig +++ b/src/http/method.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); pub const Method = enum(u8) { diff --git a/src/http/mime_type.zig b/src/http/mime_type.zig index 67d75b5fa7..152861be21 100644 --- a/src/http/mime_type.zig +++ b/src/http/mime_type.zig @@ -8,7 +8,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; const Loader = @import("../options.zig").Loader; const ComptimeStringMap = bun.ComptimeStringMap; diff --git a/src/http/url_path.zig b/src/http/url_path.zig index 5085c87d55..83a33da454 100644 --- a/src/http/url_path.zig +++ b/src/http/url_path.zig @@ -7,7 +7,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; const PercentEncoding = @import("../url.zig").PercentEncoding; const std = @import("std"); diff --git a/src/http/websocket.zig b/src/http/websocket.zig index 5029c3090b..0c1d01c541 100644 --- a/src/http/websocket.zig +++ b/src/http/websocket.zig @@ -12,7 +12,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; pub const Opcode = enum(u4) { Continue = 0x0, diff 
--git a/src/http/websocket_http_client.zig b/src/http/websocket_http_client.zig index 04dd6d1642..847155dd0f 100644 --- a/src/http/websocket_http_client.zig +++ b/src/http/websocket_http_client.zig @@ -11,7 +11,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const BoringSSL = bun.BoringSSL; const uws = bun.uws; const JSC = bun.JSC; diff --git a/src/install/bin.zig b/src/install/bin.zig index 27fa700999..f58dfe8dd3 100644 --- a/src/install/bin.zig +++ b/src/install/bin.zig @@ -7,7 +7,6 @@ const Global = bun.Global; const std = @import("std"); const strings = bun.strings; const Environment = @import("../env.zig"); -const C = @import("../c.zig"); const Fs = @import("../fs.zig"); const stringZ = bun.stringZ; const Resolution = @import("./resolution.zig").Resolution; @@ -586,14 +585,14 @@ pub const Bin = extern struct { err: ?anyerror = null, - pub var umask: bun.C.Mode = 0; + pub var umask: bun.Mode = 0; var has_set_umask = false; pub fn ensureUmask() void { if (!has_set_umask) { has_set_umask = true; - umask = bun.C.umask(0); + umask = bun.sys.umask(0); } } diff --git a/src/install/extract_tarball.zig b/src/install/extract_tarball.zig index 21c8cb9aeb..2c7e73056c 100644 --- a/src/install/extract_tarball.zig +++ b/src/install/extract_tarball.zig @@ -344,7 +344,7 @@ fn extract(this: *const ExtractTarball, tgz_bytes: []const u8) !Install.ExtractD return error.InstallFailed; }; - switch (bun.C.moveOpenedFileAt(dir_to_move, .fromStdDir(cache_dir), path_to_use, true)) { + switch (bun.windows.moveOpenedFileAt(dir_to_move, .fromStdDir(cache_dir), path_to_use, true)) { .err => |err| { if (!did_retry) { switch (err.getErrno()) { diff --git a/src/install/install.zig b/src/install/install.zig index 8b0e7940d6..bccc864c0b 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -16,7 +16,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; const std = @import("std"); const uws = @import("../deps/uws.zig"); const JSC = bun.JSC; @@ -1411,7 +1410,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { stackpath[entry.path.len] = 0; const path: [:0]u8 = stackpath[0..entry.path.len :0]; const basename: [:0]u8 = stackpath[entry.path.len - entry.basename.len .. 
entry.path.len :0]; - switch (C.clonefileat( + switch (bun.c.clonefileat( entry.dir.fd, basename, destination_dir_.fd, @@ -1464,7 +1463,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { } } - return switch (C.clonefileat( + return switch (bun.c.clonefileat( this.cache_dir.fd, this.cache_dir_subpath, destination_dir.fd, @@ -1505,7 +1504,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { } }; - threadlocal var node_fs_for_package_installer: bun.JSC.Node.NodeFS = .{}; + threadlocal var node_fs_for_package_installer: bun.JSC.Node.fs.NodeFS = .{}; fn initInstallDir(this: *@This(), state: *InstallDirState, destination_dir: std.fs.Dir, method: Method) Result { const destbase = destination_dir; @@ -1685,7 +1684,7 @@ pub fn NewPackageInstall(comptime kind: PkgInstallKind) type { if (comptime Environment.isPosix) { const stat = in_file.stat() catch continue; - _ = C.fchmod(outfile.handle, @intCast(stat.mode)); + _ = bun.c.fchmod(outfile.handle, @intCast(stat.mode)); } bun.copyFileWithState(.fromStdFile(in_file), .fromStdFile(outfile), ©_file_state).unwrap() catch |err| { @@ -5143,7 +5142,7 @@ pub const PackageManager = struct { try buffered_writer.flush(); if (comptime Environment.isPosix) { - _ = C.fchmod( + _ = bun.c.fchmod( tmpfile.fd.cast(), // chmod 666, 0o0000040 | 0o0000004 | 0o0000002 | 0o0000400 | 0o0000200 | 0o0000020, @@ -11734,7 +11733,7 @@ pub const PackageManager = struct { defer outfile.close(); const stat = in_file.stat() catch continue; - _ = C.fchmod(outfile.handle, @intCast(stat.mode)); + _ = bun.c.fchmod(outfile.handle, @intCast(stat.mode)); bun.copyFileWithState(.fromStdFile(in_file), .fromStdFile(outfile), ©_file_state).unwrap() catch |err| { Output.prettyError("{s}: copying file {}", .{ @errorName(err), bun.fmt.fmtOSPath(entry.path, .{}) }); @@ -12240,8 +12239,8 @@ pub const PackageManager = struct { .posix, ); - var nodefs = bun.JSC.Node.NodeFS{}; - const args = bun.JSC.Node.Arguments.Mkdir{ + var nodefs = bun.JSC.Node.fs.NodeFS{}; + const args = bun.JSC.Node.fs.Arguments.Mkdir{ .path = .{ .string = bun.PathString.init(manager.options.patch_features.commit.patches_dir) }, }; if (nodefs.mkdirRecursive(args).asErr()) |e| { @@ -13672,9 +13671,9 @@ pub const PackageManager = struct { Global.exit(1); }; - const is_writable = if (stat.uid == bun.C.getuid()) + const is_writable = if (stat.uid == bun.c.getuid()) stat.mode & bun.S.IWUSR > 0 - else if (stat.gid == bun.C.getgid()) + else if (stat.gid == bun.c.getgid()) stat.mode & bun.S.IWGRP > 0 else stat.mode & bun.S.IWOTH > 0; @@ -14113,7 +14112,7 @@ pub const PackageManager = struct { // Attempt to create a new node_modules folder if (bun.sys.mkdir("node_modules", 0o755).asErr()) |err| { - if (err.errno != @intFromEnum(bun.C.E.EXIST)) { + if (err.errno != @intFromEnum(bun.sys.E.EXIST)) { Output.err(err, "could not create the \"node_modules\" directory", .{}); Global.crash(); } diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 1a35ca8e3e..93ebc1d2c5 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -11,7 +11,7 @@ const Glob = bun.glob; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSAst = bun.JSAst; const TextLockfile = @import("./bun.lock.zig"); const OOM = bun.OOM; diff --git a/src/install/patch_install.zig b/src/install/patch_install.zig index b4c0fa2f35..e2cbb29a4e 100644 --- a/src/install/patch_install.zig +++ b/src/install/patch_install.zig @@ -445,7 
+445,7 @@ pub const PatchTask = struct { const stat: bun.Stat = switch (bun.sys.stat(absolute_patchfile_path)) { .err => |e| { - if (e.getErrno() == bun.C.E.NOENT) { + if (e.getErrno() == .NOENT) { const fmt = "\n\nerror: could not find patch file {s}\n\nPlease make sure it exists.\n\nTo create a new patch file run:\n\n bun patch {s}\n"; const args = .{ this.callback.calc_hash.patchfile_path, diff --git a/src/install/windows-shim/bun_shim_impl.zig b/src/install/windows-shim/bun_shim_impl.zig index 0bcb77c211..e3359adfac 100644 --- a/src/install/windows-shim/bun_shim_impl.zig +++ b/src/install/windows-shim/bun_shim_impl.zig @@ -737,7 +737,7 @@ fn launcher(comptime mode: LauncherMode, bun_ctx: anytype) mode.RetType() { // Prepare stdio for the child process, as after this we are going to *immediatly* exit // it is likely that the c-runtime's atexit will not be called as we end the process ourselves. bun.Output.Source.Stdio.restore(); - bun.C.windows_enable_stdio_inheritance(); + bun.windows.windows_enable_stdio_inheritance(); } // I attempted to use lower level methods for this, but it really seems diff --git a/src/io/PipeReader.zig b/src/io/PipeReader.zig index 74b857b066..15f1a5cb04 100644 --- a/src/io/PipeReader.zig +++ b/src/io/PipeReader.zig @@ -467,7 +467,7 @@ const PosixBufferedReader = struct { } if (comptime file_type == .pipe) { - if (bun.Environment.isMac or !bun.C.RWFFlagSupport.isMaybeSupported()) { + if (bun.Environment.isMac or !bun.linux.RWFFlagSupport.isMaybeSupported()) { switch (bun.isReadable(fd)) { .ready => {}, .hup => { @@ -559,7 +559,7 @@ const PosixBufferedReader = struct { } if (comptime file_type == .pipe) { - if (bun.Environment.isMac or !bun.C.RWFFlagSupport.isMaybeSupported()) { + if (bun.Environment.isMac or !bun.linux.RWFFlagSupport.isMaybeSupported()) { switch (bun.isReadable(fd)) { .ready => {}, .hup => { @@ -867,7 +867,7 @@ pub const WindowsBufferedReader = struct { pub fn setRawMode(this: *WindowsBufferedReader, value: bool) bun.JSC.Maybe(void) { const source = this.source orelse return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.BADF), + .errno = @intFromEnum(bun.sys.E.BADF), .syscall = .uv_tty_set_mode, }, }; @@ -970,7 +970,7 @@ pub const WindowsBufferedReader = struct { } // ops we should not hit this lets fail with EPIPE bun.assert(false); - return this.onRead(.{ .err = bun.sys.Error.fromCode(bun.C.E.PIPE, .read) }, "", .progress); + return this.onRead(.{ .err = bun.sys.Error.fromCode(bun.sys.E.PIPE, .read) }, "", .progress); }, } } @@ -978,7 +978,7 @@ pub const WindowsBufferedReader = struct { pub fn startReading(this: *WindowsBufferedReader) bun.JSC.Maybe(void) { if (this.flags.is_done or !this.flags.is_paused) return .{ .result = {} }; this.flags.is_paused = false; - const source: Source = this.source orelse return .{ .err = bun.sys.Error.fromCode(bun.C.E.BADF, .read) }; + const source: Source = this.source orelse return .{ .err = bun.sys.Error.fromCode(bun.sys.E.BADF, .read) }; bun.assert(!source.isClosed()); switch (source) { diff --git a/src/io/PipeWriter.zig b/src/io/PipeWriter.zig index e3ac5ec5ca..864d51f31d 100644 --- a/src/io/PipeWriter.zig +++ b/src/io/PipeWriter.zig @@ -82,7 +82,7 @@ pub fn PosixPipeWriter( fn writeToBlockingPipe(fd: bun.FileDescriptor, buf: []const u8) JSC.Maybe(usize) { if (comptime bun.Environment.isLinux) { - if (bun.C.linux.RWFFlagSupport.isMaybeSupported()) { + if (bun.linux.RWFFlagSupport.isMaybeSupported()) { return bun.sys.writeNonblocking(fd, buf); } } @@ -1351,7 +1351,7 @@ pub fn WindowsStreamingWriter(comptime 
Parent: type, function_table: anytype) ty } var pipe = this.source orelse { - const err = bun.sys.Error.fromCode(bun.C.E.PIPE, .pipe); + const err = bun.sys.Error.fromCode(bun.sys.E.PIPE, .pipe); this.last_write_result = .{ .err = err }; onError(this.parent, err); this.closeWithoutReporting(); diff --git a/src/io/io.zig b/src/io/io.zig index 9e493b9387..7b9ebd0741 100644 --- a/src/io/io.zig +++ b/src/io/io.zig @@ -39,7 +39,7 @@ pub const Loop = struct { epoll.data.ptr = @intFromPtr(&loop); const rc = std.os.linux.epoll_ctl(loop.epoll_fd.cast(), std.os.linux.EPOLL.CTL_ADD, loop.waker.getFd().cast(), &epoll); - switch (bun.C.getErrno(rc)) { + switch (bun.sys.getErrno(rc)) { .SUCCESS => {}, else => |err| bun.Output.panic("Failed to wait on epoll {s}", .{@tagName(err)}), } @@ -149,7 +149,7 @@ pub const Loop = struct { std.math.maxInt(i32), ); - switch (bun.C.getErrno(rc)) { + switch (bun.sys.getErrno(rc)) { .INTR => continue, .SUCCESS => {}, else => |e| bun.Output.panic("epoll_wait: {s}", .{@tagName(e)}), @@ -270,7 +270,7 @@ pub const Loop = struct { null, ); - switch (bun.C.getErrno(rc)) { + switch (bun.sys.getErrno(rc)) { .INTR => continue, .SUCCESS => {}, else => |e| bun.Output.panic("kevent64 failed: {s}", .{@tagName(e)}), @@ -344,8 +344,8 @@ pub const Action = union(enum) { }; }; -const ReadFile = bun.JSC.WebCore.Blob.ReadFile; -const WriteFile = bun.JSC.WebCore.Blob.WriteFile; +const ReadFile = bun.webcore.Blob.read_file.ReadFile; +const WriteFile = bun.webcore.Blob.write_file.WriteFile; const Pollable = struct { const Tag = enum(bun.TaggedPointer.Tag) { @@ -614,7 +614,7 @@ pub const Poll = struct { inline else => |t| { var this: *Pollable.Tag.Type(t) = @alignCast(@fieldParentPtr("io_poll", poll)); if (event.events & linux.EPOLL.ERR != 0) { - const errno = bun.C.getErrno(event.events); + const errno = bun.sys.getErrno(event.events); log("error() = {s}", .{@tagName(errno)}); this.onIOError(bun.sys.Error.fromCode(errno, .epoll_ctl)); } else { @@ -683,7 +683,7 @@ pub const Poll = struct { } }; -pub const retry = bun.C.E.AGAIN; +pub const retry = bun.sys.E.AGAIN; pub const ReadState = @import("./pipes.zig").ReadState; pub const PipeReader = @import("./PipeReader.zig").PipeReader; diff --git a/src/io/source.zig b/src/io/source.zig index a37c62ac95..ed5d3d5ba9 100644 --- a/src/io/source.zig +++ b/src/io/source.zig @@ -214,7 +214,7 @@ pub const Source = union(enum) { }, else => .{ .err = .{ - .errno = @intFromEnum(bun.C.E.NOTSUP), + .errno = @intFromEnum(bun.sys.E.NOTSUP), .syscall = .uv_tty_set_mode, .fd = this.getFd(), }, diff --git a/src/js/internal-for-testing.ts b/src/js/internal-for-testing.ts index 9937a82aab..f17d043862 100644 --- a/src/js/internal-for-testing.ts +++ b/src/js/internal-for-testing.ts @@ -101,7 +101,7 @@ export const memfd_create: (size: number) => number = $newZigFunction( ); export const setSyntheticAllocationLimitForTesting: (limit: number) => number = $newZigFunction( - "javascript.zig", + "virtual_machine_exports.zig", "Bun__setSyntheticAllocationLimitForTesting", 1, ); diff --git a/src/js_ast.zig b/src/js_ast.zig index d94460d931..cb78a2ece6 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -11,7 +11,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + pub const Ref = @import("ast/base.zig").Ref; pub const Index = @import("ast/base.zig").Index; const RefHashCtx = @import("ast/base.zig").RefHashCtx; @@ -7904,13 +7904,11 @@ pub fn printmem(comptime 
format: string, args: anytype) void { pub const Macro = struct { const JavaScript = bun.JSC; - const JSCBase = @import("./bun.js/base.zig"); const Resolver = @import("./resolver/resolver.zig").Resolver; const isPackagePath = @import("./resolver/resolver.zig").isPackagePath; const ResolveResult = @import("./resolver/resolver.zig").Result; const DotEnv = @import("./env_loader.zig"); const js = @import("./bun.js/javascript_core_c_api.zig"); - const Zig = @import("./bun.js/bindings/exports.zig"); const Transpiler = bun.Transpiler; const MacroEntryPoint = bun.transpiler.EntryPoints.MacroEntryPoint; const MacroRemap = @import("./resolver/package_json.zig").MacroMap; @@ -7969,7 +7967,7 @@ pub const Macro = struct { bun.assert(!isMacroPath(import_record_path_without_macro_prefix)); const input_specifier = brk: { - if (JSC.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| { + if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record_path, .bun)) |replacement| { break :brk replacement.path; } @@ -8133,7 +8131,7 @@ pub const Macro = struct { const VisitMap = std.AutoHashMapUnmanaged(JSC.JSValue, Expr); threadlocal var args_buf: [3]js.JSObjectRef = undefined; - threadlocal var exception_holder: Zig.ZigException.Holder = undefined; + threadlocal var exception_holder: JSC.ZigException.Holder = undefined; pub const MacroError = error{ MacroFailed, OutOfMemory } || ToJSError || bun.JSError; pub const Run = struct { @@ -8252,7 +8250,7 @@ pub const Macro = struct { } else if (value.as(JSC.WebCore.Blob)) |resp| { blob_ = resp.*; blob_.?.allocator = null; - } else if (value.as(JSC.ResolveMessage) != null or value.as(JSC.BuildMessage) != null) { + } else if (value.as(bun.api.ResolveMessage) != null or value.as(bun.api.BuildMessage) != null) { _ = this.macro.vm.uncaughtException(this.global, value, false); return error.MacroFailed; } @@ -8462,7 +8460,7 @@ pub const Macro = struct { ) MacroError!Expr { if (comptime Environment.isDebug) Output.prettyln("[macro] call {s}", .{function_name}); - exception_holder = Zig.ZigException.Holder.init(); + exception_holder = JSC.ZigException.Holder.init(); var js_args: []JSC.JSValue = &.{}; var js_processed_args_len: usize = 0; defer { diff --git a/src/js_lexer.zig b/src/js_lexer.zig index 51fb58fbed..b422a99d66 100644 --- a/src/js_lexer.zig +++ b/src/js_lexer.zig @@ -14,7 +14,7 @@ const CodePoint = bun.CodePoint; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const FeatureFlags = @import("feature_flags.zig"); const JavascriptString = []const u16; const Indentation = bun.js_printer.Options.Indentation; diff --git a/src/js_parser.zig b/src/js_parser.zig index 14bf169d24..6f04e71f34 100644 --- a/src/js_parser.zig +++ b/src/js_parser.zig @@ -24,7 +24,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const G = js_ast.G; const Define = @import("./defines.zig").Define; const DefineData = @import("./defines.zig").DefineData; @@ -7450,10 +7450,11 @@ fn NewParser_( .bin_rem => { if (p.should_fold_typescript_constant_expressions) { if (Expr.extractNumericValues(e_.left.data, e_.right.data)) |vals| { + const fmod = @extern(*const fn (f64, f64) callconv(.C) f64, .{ .name = "fmod" }); return p.newExpr( // Use libc fmod here to be consistent with what JavaScriptCore does // 
https://github.com/oven-sh/WebKit/blob/7a0b13626e5db69aa5a32d037431d381df5dfb61/Source/JavaScriptCore/runtime/MathCommon.cpp#L574-L597 - E.Number{ .value = if (comptime Environment.isNative) bun.C.fmod(vals[0], vals[1]) else std.math.mod(f64, vals[0], vals[1]) catch 0 }, + E.Number{ .value = if (comptime Environment.isNative) fmod(vals[0], vals[1]) else std.math.mod(f64, vals[0], vals[1]) catch 0 }, v.loc, ); } diff --git a/src/js_printer.zig b/src/js_printer.zig index 19137c9f72..bff9896680 100644 --- a/src/js_printer.zig +++ b/src/js_printer.zig @@ -18,7 +18,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const Ref = @import("ast/base.zig").Ref; const StoredFileDescriptorType = bun.StoredFileDescriptorType; const FeatureFlags = bun.FeatureFlags; diff --git a/src/jsc.zig b/src/jsc.zig deleted file mode 100644 index 0ce27f5860..0000000000 --- a/src/jsc.zig +++ /dev/null @@ -1,141 +0,0 @@ -//! The `bun.JSC` namespace contains -//! 1. bindings for JavaScriptCore APIs (bun.JSC.*) -//! 2. zig components for Bun APIs (in bun.JSC.API or bun.JSC.*) -//! 3. zig components for Node APIs (in bun.JSC.Node) -//! 4. zig components for Web APIs (in bun.JSC.WebCore) -pub usingnamespace @import("./bun.js/base.zig"); -pub usingnamespace @import("./bun.js/bindings/bindings.zig"); -pub usingnamespace @import("./bun.js/bindings/exports.zig"); -pub usingnamespace @import("./bun.js/event_loop.zig"); -pub usingnamespace @import("./bun.js/javascript.zig"); -pub usingnamespace @import("./bun.js/module_loader.zig"); -pub const Debugger = @import("./bun.js/bindings/Debugger.zig").Debugger; -pub const napi = @import("./napi/napi.zig"); -pub const RareData = @import("./bun.js/rare_data.zig"); -pub const C = @import("./bun.js/javascript_core_c_api.zig"); -pub const WebCore = @import("./bun.js/webcore.zig"); -pub const BuildMessage = @import("./bun.js/BuildMessage.zig").BuildMessage; -pub const ResolveMessage = @import("./bun.js/ResolveMessage.zig").ResolveMessage; -pub const Cloudflare = struct { - pub const HTMLRewriter = @import("./bun.js/api/html_rewriter.zig").HTMLRewriter; - pub const ContentOptions = @import("./bun.js/api/html_rewriter.zig").ContentOptions; - pub const Element = @import("./bun.js/api/html_rewriter.zig").Element; - pub const Comment = @import("./bun.js/api/html_rewriter.zig").Comment; - pub const TextChunk = @import("./bun.js/api/html_rewriter.zig").TextChunk; - pub const DocType = @import("./bun.js/api/html_rewriter.zig").DocType; - pub const DocEnd = @import("./bun.js/api/html_rewriter.zig").DocEnd; - pub const EndTag = @import("./bun.js/api/html_rewriter.zig").EndTag; - pub const AttributeIterator = @import("./bun.js/api/html_rewriter.zig").AttributeIterator; -}; -pub const Jest = @import("./bun.js/test/jest.zig"); -pub const Expect = @import("./bun.js/test/expect.zig"); -pub const Snapshot = @import("./bun.js/test/snapshot.zig"); -pub const API = struct { - pub const NodeHTTPResponse = @import("./bun.js/api/server.zig").NodeHTTPResponse; - pub const Glob = @import("./bun.js/api/glob.zig"); - pub const Shell = @import("./shell/shell.zig"); - pub const JSBundler = @import("./bun.js/api/JSBundler.zig").JSBundler; - pub const BuildArtifact = @import("./bun.js/api/JSBundler.zig").BuildArtifact; - pub const JSTranspiler = @import("./bun.js/api/JSTranspiler.zig"); - pub const HTTPServer = @import("./bun.js/api/server.zig").HTTPServer; - pub const AnyServer = 
@import("./bun.js/api/server.zig").AnyServer; - pub const SavedRequest = @import("./bun.js/api/server.zig").SavedRequest; - pub const ServerConfig = @import("./bun.js/api/server.zig").ServerConfig; - pub const ServerWebSocket = @import("./bun.js/api/server.zig").ServerWebSocket; - pub const HTTPSServer = @import("./bun.js/api/server.zig").HTTPSServer; - pub const DebugHTTPServer = @import("./bun.js/api/server.zig").DebugHTTPServer; - pub const DebugHTTPSServer = @import("./bun.js/api/server.zig").DebugHTTPSServer; - pub const AnyRequestContext = @import("./bun.js/api/server.zig").AnyRequestContext; - pub const Bun = @import("./bun.js/api/BunObject.zig"); - pub const FileSystemRouter = @import("./bun.js/api/filesystem_router.zig").FileSystemRouter; - pub const MatchedRoute = @import("./bun.js/api/filesystem_router.zig").MatchedRoute; - pub const TCPSocket = @import("./bun.js/api/bun/socket.zig").TCPSocket; - pub const TLSSocket = @import("./bun.js/api/bun/socket.zig").TLSSocket; - pub const UDPSocket = @import("./bun.js/api/bun/udp_socket.zig").UDPSocket; - pub const SocketAddress = @import("./bun.js/api/bun/socket.zig").SocketAddress; - pub const Listener = @import("./bun.js/api/bun/socket.zig").Listener; - pub const H2FrameParser = @import("./bun.js/api/bun/h2_frame_parser.zig").H2FrameParser; - pub const NativeZlib = @import("./bun.js/node/node_zlib_binding.zig").SNativeZlib; - pub const NativeBrotli = @import("./bun.js/node/node_zlib_binding.zig").SNativeBrotli; - pub const HTMLBundle = @import("./bun.js/api/server/HTMLBundle.zig"); - pub const Valkey = @import("./valkey/js_valkey.zig").JSValkeyClient; -}; -pub const Postgres = @import("./sql/postgres.zig"); -pub const DNS = @import("./bun.js/api/bun/dns_resolver.zig"); -pub const FFI = @import("./bun.js/api/ffi.zig").FFI; -pub const Node = struct { - pub usingnamespace @import("./bun.js/node/types.zig"); - pub usingnamespace @import("./bun.js/node/node_fs.zig"); - pub usingnamespace @import("./bun.js/node/node_fs_watcher.zig"); - pub usingnamespace @import("./bun.js/node/node_fs_stat_watcher.zig"); - pub usingnamespace @import("./bun.js/node/node_fs_binding.zig"); - pub usingnamespace @import("./bun.js/node/node_os.zig"); - pub const fs = @import("./bun.js/node/node_fs_constant.zig"); - pub const Util = struct { - pub const parseArgs = @import("./bun.js/node/util/parse_args.zig").parseArgs; - }; - pub const Crypto = @import("./bun.js/node/node_crypto_binding.zig"); -}; - -pub const js_property_iterator = @import("bun.js/bindings/JSPropertyIterator.zig"); -pub const JSPropertyIterator = js_property_iterator.JSPropertyIterator; -pub const JSPropertyIteratorOptions = js_property_iterator.JSPropertyIteratorOptions; - -const std = @import("std"); -const Syscall = @import("./sys.zig"); -const Output = @import("./output.zig"); - -pub const Maybe = Syscall.Maybe; -pub const jsBoolean = @This().JSValue.jsBoolean; -pub const jsEmptyString = @This().JSValue.jsEmptyString; -pub const jsNumber = @This().JSValue.jsNumber; - -const log = Output.scoped(.JSC, true); -pub inline fn markBinding(src: std.builtin.SourceLocation) void { - log("{s} ({s}:{d})", .{ src.fn_name, src.file, src.line }); -} -pub inline fn markMemberBinding(comptime class: anytype, src: std.builtin.SourceLocation) void { - if (!bun.Environment.enable_logs) return; - const classname = switch (@typeInfo(@TypeOf(class))) { - .pointer => class, // assumed to be a static string - else => @typeName(class), - }; - log("{s}.{s} ({s}:{d})", .{ classname, src.fn_name, src.file, src.line }); 
-} - -pub const Subprocess = API.Bun.Subprocess; - -/// This file is generated by: -/// 1. `bun src/bun.js/scripts/generate-classes.ts` -/// 2. Scan for **/*.classes.ts files in src/bun.js/src -/// 3. Generate a JS wrapper for each class in: -/// - Zig: generated_classes.zig -/// - C++: ZigGeneratedClasses.h, ZigGeneratedClasses.cpp -/// 4. For the Zig code to successfully compile: -/// - Add it to generated_classes_list.zig -/// - Expose the generated methods: -/// ```zig -/// pub const js = JSC.Codegen.JSMyClassName; -/// pub const toJS = js.toJS; -/// pub const fromJS = js.fromJS; -/// pub const fromJSDirect = js.fromJSDirect; -/// ``` -/// 5. `bun run build` -/// -pub const Codegen = @import("ZigGeneratedClasses"); -pub const GeneratedClassesList = @import("./bun.js/bindings/generated_classes_list.zig").Classes; - -pub const RuntimeTranspilerCache = @import("./bun.js/RuntimeTranspilerCache.zig").RuntimeTranspilerCache; - -/// The calling convention used for JavaScript functions <> Native -const bun = @import("bun"); -pub const conv = if (bun.Environment.isWindows and bun.Environment.isX64) - std.builtin.CallingConvention.SysV -else - std.builtin.CallingConvention.C; - -pub const Error = @import("ErrorCode").Error; - -pub const MAX_SAFE_INTEGER = 9007199254740991; - -pub const MIN_SAFE_INTEGER = -9007199254740991; diff --git a/src/json_parser.zig b/src/json_parser.zig index 92176f447a..7771fcac5d 100644 --- a/src/json_parser.zig +++ b/src/json_parser.zig @@ -15,7 +15,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const expect = std.testing.expect; const ImportKind = importRecord.ImportKind; const BindingNodeIndex = js_ast.BindingNodeIndex; diff --git a/src/libarchive/libarchive-bindings.zig b/src/libarchive/libarchive-bindings.zig index dbf7e5dc1c..8ac3726840 100644 --- a/src/libarchive/libarchive-bindings.zig +++ b/src/libarchive/libarchive-bindings.zig @@ -956,7 +956,7 @@ pub const Archive = opaque { .retry => continue, .eof => Return.initRes(null), .ok => { - const kind = bun.C.kindFromMode(entry.filetype()); + const kind = bun.sys.kindFromMode(entry.filetype()); if (this.filter.contains(kind)) continue; diff --git a/src/libarchive/libarchive.zig b/src/libarchive/libarchive.zig index 1c20f2cff2..3bbe1b257c 100644 --- a/src/libarchive/libarchive.zig +++ b/src/libarchive/libarchive.zig @@ -11,7 +11,7 @@ const MutableString = bun.MutableString; const FileDescriptorType = bun.FileDescriptor; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; +const c = bun.c; const std = @import("std"); const Archive = lib.Archive; const JSC = bun.JSC; @@ -370,7 +370,7 @@ pub const Archiver = struct { } } - const kind = C.kindFromMode(entry.filetype()); + const kind = bun.sys.kindFromMode(entry.filetype()); if (options.npm) { // - ignore entries other than files (`true` can only be returned if type is file) @@ -406,9 +406,9 @@ pub const Archiver = struct { remain = remain[2..]; } - for (remain) |*c| { - switch (c.*) { - '|', '<', '>', '?', ':' => c.* += 0xf000, + for (remain) |*char| { + switch (char.*) { + '|', '<', '>', '?', ':' => char.* += 0xf000, else => {}, } } @@ -476,8 +476,8 @@ pub const Archiver = struct { switch (bun.sys.openatWindows(.fromNative(dir_fd), path, flags, 0)) { .result => |fd| fd, .err => |e| switch (e.errno) { - @intFromEnum(bun.C.E.PERM), - @intFromEnum(bun.C.E.NOENT), + @intFromEnum(bun.sys.E.PERM), + 
@intFromEnum(bun.sys.E.NOENT), => brk: { bun.MakePath.makePath(u16, dir, bun.Dirname.dirname(u16, path_slice) orelse return bun.errnoToZigErr(e.errno)) catch {}; break :brk try bun.sys.openatWindows(.fromNative(dir_fd), path, flags, 0).unwrap(); @@ -556,7 +556,7 @@ pub const Archiver = struct { // #define MAX_WRITE (1024 * 1024) if (comptime Environment.isLinux) { if (size > 1_000_000) { - C.preallocate_file( + bun.sys.preallocate_file( file_handle.cast(), 0, @intCast(size), diff --git a/src/linker.zig b/src/linker.zig index 0579796cdb..068caf1a7c 100644 --- a/src/linker.zig +++ b/src/linker.zig @@ -9,7 +9,7 @@ const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const FileDescriptorType = bun.FileDescriptor; -const C = bun.C; + const Ref = @import("./ast/base.zig").Ref; const std = @import("std"); @@ -186,7 +186,7 @@ pub const Linker = struct { } if (comptime is_bun) { - if (JSC.HardcodedModule.Alias.get(import_record.path.text, linker.options.target)) |replacement| { + if (JSC.ModuleLoader.HardcodedModule.Alias.get(import_record.path.text, linker.options.target)) |replacement| { if (replacement.tag == .builtin and import_record.kind.isCommonJS()) continue; import_record.path.text = replacement.path; diff --git a/src/linux.zig b/src/linux.zig new file mode 100644 index 0000000000..4187c867a3 --- /dev/null +++ b/src/linux.zig @@ -0,0 +1,93 @@ +//! Platform specific APIs for Linux +//! +//! If an API can be implemented on multiple platforms, +//! it does not belong in this namespace. + +pub const memfd_allocator = @import("allocators/linux_memfd_allocator.zig").LinuxMemFdAllocator; + +/// splice() moves data between two file descriptors without copying +/// between kernel address space and user address space. It +/// transfers up to len bytes of data from the file descriptor fd_in +/// to the file descriptor fd_out, where one of the file descriptors +/// must refer to a pipe. +pub fn splice(fd_in: std.posix.fd_t, off_in: ?*i64, fd_out: std.posix.fd_t, off_out: ?*i64, len: usize, flags: u32) usize { + return std.os.linux.syscall6( + .splice, + @as(usize, @bitCast(@as(isize, fd_in))), + @intFromPtr(off_in), + @as(usize, @bitCast(@as(isize, fd_out))), + @intFromPtr(off_out), + len, + flags, + ); +} + +pub const RWFFlagSupport = enum(u8) { + unknown = 0, + unsupported = 2, + supported = 1, + + var rwf_bool = std.atomic.Value(RWFFlagSupport).init(RWFFlagSupport.unknown); + + pub fn isLinuxKernelVersionWithBuggyRWF_NONBLOCK() bool { + return bun.linuxKernelVersion().major == 5 and switch (bun.linuxKernelVersion().minor) { + 9, 10 => true, + else => false, + }; + } + + pub fn disable() void { + rwf_bool.store(.unsupported, .monotonic); + } + + /// Workaround for https://github.com/google/gvisor/issues/2601 + pub fn isMaybeSupported() bool { + if (comptime !bun.Environment.isLinux) return false; + switch (rwf_bool.load(.monotonic)) { + .unknown => { + if (isLinuxKernelVersionWithBuggyRWF_NONBLOCK() or bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_RWF_NONBLOCK")) { + rwf_bool.store(.unsupported, .monotonic); + return false; + } + + rwf_bool.store(.supported, .monotonic); + return true; + }, + .supported => { + return true; + }, + else => { + return false; + }, + } + + unreachable; + } +}; + +/// https://man7.org/linux/man-pages/man2/ioctl_ficlone.2.html +/// +/// Support for FICLONE is dependent on the filesystem driver. 
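+/// The clone is performed copy-on-write (a "reflink"): both descriptors must refer to files on the
+/// same mounted filesystem, otherwise the ioctl fails with EXDEV, and filesystems without reflink
+/// support (for example ext4) report EOPNOTSUPP.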
+pub fn ioctl_ficlone(dest_fd: bun.FileDescriptor, srcfd: bun.FileDescriptor) usize { + return std.os.linux.ioctl(dest_fd.native(), bun.c.FICLONE, @intCast(srcfd.native())); +} + +export fn sys_epoll_pwait2(epfd: i32, events: ?[*]std.os.linux.epoll_event, maxevents: i32, timeout: ?*const std.os.linux.timespec, sigmask: ?*const std.os.linux.sigset_t) isize { + return @bitCast( + std.os.linux.syscall6( + .epoll_pwait2, + @bitCast(@as(isize, @intCast(epfd))), + @intFromPtr(events), + @bitCast(@as(isize, @intCast(maxevents))), + @intFromPtr(timeout), + @intFromPtr(sigmask), + // This is the correct value. glibc claims to pass `sizeof sigset_t` for this argument, + // which would be 128, but they actually pass 8 which is what the kernel expects. + // https://github.com/ziglang/zig/issues/12715 + 8, + ), + ); +} + +const std = @import("std"); +const bun = @import("bun"); diff --git a/src/linux_c.zig b/src/linux_c.zig deleted file mode 100644 index 57638ecc2e..0000000000 --- a/src/linux_c.zig +++ /dev/null @@ -1,698 +0,0 @@ -const std = @import("std"); -const bun = @import("bun"); -pub extern "c" fn memmem(haystack: [*]const u8, haystacklen: usize, needle: [*]const u8, needlelen: usize) ?[*]const u8; -pub const SystemErrno = enum(u8) { - SUCCESS = 0, - EPERM = 1, - ENOENT = 2, - ESRCH = 3, - EINTR = 4, - EIO = 5, - ENXIO = 6, - E2BIG = 7, - ENOEXEC = 8, - EBADF = 9, - ECHILD = 10, - EAGAIN = 11, - ENOMEM = 12, - EACCES = 13, - EFAULT = 14, - ENOTBLK = 15, - EBUSY = 16, - EEXIST = 17, - EXDEV = 18, - ENODEV = 19, - ENOTDIR = 20, - EISDIR = 21, - EINVAL = 22, - ENFILE = 23, - EMFILE = 24, - ENOTTY = 25, - ETXTBSY = 26, - EFBIG = 27, - ENOSPC = 28, - ESPIPE = 29, - EROFS = 30, - EMLINK = 31, - EPIPE = 32, - EDOM = 33, - ERANGE = 34, - EDEADLK = 35, - ENAMETOOLONG = 36, - ENOLCK = 37, - ENOSYS = 38, - ENOTEMPTY = 39, - ELOOP = 40, - EWOULDBLOCK = 41, - ENOMSG = 42, - EIDRM = 43, - ECHRNG = 44, - EL2NSYNC = 45, - EL3HLT = 46, - EL3RST = 47, - ELNRNG = 48, - EUNATCH = 49, - ENOCSI = 50, - EL2HLT = 51, - EBADE = 52, - EBADR = 53, - EXFULL = 54, - ENOANO = 55, - EBADRQC = 56, - EBADSLT = 57, - EDEADLOCK = 58, - EBFONT = 59, - ENOSTR = 60, - ENODATA = 61, - ETIME = 62, - ENOSR = 63, - ENONET = 64, - ENOPKG = 65, - EREMOTE = 66, - ENOLINK = 67, - EADV = 68, - ESRMNT = 69, - ECOMM = 70, - EPROTO = 71, - EMULTIHOP = 72, - EDOTDOT = 73, - EBADMSG = 74, - EOVERFLOW = 75, - ENOTUNIQ = 76, - EBADFD = 77, - EREMCHG = 78, - ELIBACC = 79, - ELIBBAD = 80, - ELIBSCN = 81, - ELIBMAX = 82, - ELIBEXEC = 83, - EILSEQ = 84, - ERESTART = 85, - ESTRPIPE = 86, - EUSERS = 87, - ENOTSOCK = 88, - EDESTADDRREQ = 89, - EMSGSIZE = 90, - EPROTOTYPE = 91, - ENOPROTOOPT = 92, - EPROTONOSUPPORT = 93, - ESOCKTNOSUPPORT = 94, - /// For Linux, EOPNOTSUPP is the real value - /// but it's ~the same and is incompatible across operating systems - /// https://lists.gnu.org/archive/html/bug-glibc/2002-08/msg00017.html - ENOTSUP = 95, - EPFNOSUPPORT = 96, - EAFNOSUPPORT = 97, - EADDRINUSE = 98, - EADDRNOTAVAIL = 99, - ENETDOWN = 100, - ENETUNREACH = 101, - ENETRESET = 102, - ECONNABORTED = 103, - ECONNRESET = 104, - ENOBUFS = 105, - EISCONN = 106, - ENOTCONN = 107, - ESHUTDOWN = 108, - ETOOMANYREFS = 109, - ETIMEDOUT = 110, - ECONNREFUSED = 111, - EHOSTDOWN = 112, - EHOSTUNREACH = 113, - EALREADY = 114, - EINPROGRESS = 115, - ESTALE = 116, - EUCLEAN = 117, - ENOTNAM = 118, - ENAVAIL = 119, - EISNAM = 120, - EREMOTEIO = 121, - EDQUOT = 122, - ENOMEDIUM = 123, - EMEDIUMTYPE = 124, - ECANCELED = 125, - ENOKEY = 126, - EKEYEXPIRED = 127, - 
EKEYREVOKED = 128, - EKEYREJECTED = 129, - EOWNERDEAD = 130, - ENOTRECOVERABLE = 131, - ERFKILL = 132, - EHWPOISON = 133, - - pub const max = 134; - - pub fn init(code: anytype) ?SystemErrno { - if (code < 0) { - if (code <= -max) { - return null; - } - return @enumFromInt(-code); - } - if (code >= max) return null; - return @enumFromInt(code); - } -}; - -pub const UV_E2BIG: i32 = @intFromEnum(SystemErrno.E2BIG); -pub const UV_EACCES: i32 = @intFromEnum(SystemErrno.EACCES); -pub const UV_EADDRINUSE: i32 = @intFromEnum(SystemErrno.EADDRINUSE); -pub const UV_EADDRNOTAVAIL: i32 = @intFromEnum(SystemErrno.EADDRNOTAVAIL); -pub const UV_EAFNOSUPPORT: i32 = @intFromEnum(SystemErrno.EAFNOSUPPORT); -pub const UV_EAGAIN: i32 = @intFromEnum(SystemErrno.EAGAIN); -pub const UV_EALREADY: i32 = @intFromEnum(SystemErrno.EALREADY); -pub const UV_EBADF: i32 = @intFromEnum(SystemErrno.EBADF); -pub const UV_EBUSY: i32 = @intFromEnum(SystemErrno.EBUSY); -pub const UV_ECANCELED: i32 = @intFromEnum(SystemErrno.ECANCELED); -pub const UV_ECHARSET: i32 = -bun.windows.libuv.UV_ECHARSET; -pub const UV_ECONNABORTED: i32 = @intFromEnum(SystemErrno.ECONNABORTED); -pub const UV_ECONNREFUSED: i32 = @intFromEnum(SystemErrno.ECONNREFUSED); -pub const UV_ECONNRESET: i32 = @intFromEnum(SystemErrno.ECONNRESET); -pub const UV_EDESTADDRREQ: i32 = @intFromEnum(SystemErrno.EDESTADDRREQ); -pub const UV_EEXIST: i32 = @intFromEnum(SystemErrno.EEXIST); -pub const UV_EFAULT: i32 = @intFromEnum(SystemErrno.EFAULT); -pub const UV_EHOSTUNREACH: i32 = @intFromEnum(SystemErrno.EHOSTUNREACH); -pub const UV_EINTR: i32 = @intFromEnum(SystemErrno.EINTR); -pub const UV_EINVAL: i32 = @intFromEnum(SystemErrno.EINVAL); -pub const UV_EIO: i32 = @intFromEnum(SystemErrno.EIO); -pub const UV_EISCONN: i32 = @intFromEnum(SystemErrno.EISCONN); -pub const UV_EISDIR: i32 = @intFromEnum(SystemErrno.EISDIR); -pub const UV_ELOOP: i32 = @intFromEnum(SystemErrno.ELOOP); -pub const UV_EMFILE: i32 = @intFromEnum(SystemErrno.EMFILE); -pub const UV_EMSGSIZE: i32 = @intFromEnum(SystemErrno.EMSGSIZE); -pub const UV_ENAMETOOLONG: i32 = @intFromEnum(SystemErrno.ENAMETOOLONG); -pub const UV_ENETDOWN: i32 = @intFromEnum(SystemErrno.ENETDOWN); -pub const UV_ENETUNREACH: i32 = @intFromEnum(SystemErrno.ENETUNREACH); -pub const UV_ENFILE: i32 = @intFromEnum(SystemErrno.ENFILE); -pub const UV_ENOBUFS: i32 = @intFromEnum(SystemErrno.ENOBUFS); -pub const UV_ENODEV: i32 = @intFromEnum(SystemErrno.ENODEV); -pub const UV_ENOENT: i32 = @intFromEnum(SystemErrno.ENOENT); -pub const UV_ENOMEM: i32 = @intFromEnum(SystemErrno.ENOMEM); -pub const UV_ENONET: i32 = @intFromEnum(SystemErrno.ENONET); -pub const UV_ENOSPC: i32 = @intFromEnum(SystemErrno.ENOSPC); -pub const UV_ENOSYS: i32 = @intFromEnum(SystemErrno.ENOSYS); -pub const UV_ENOTCONN: i32 = @intFromEnum(SystemErrno.ENOTCONN); -pub const UV_ENOTDIR: i32 = @intFromEnum(SystemErrno.ENOTDIR); -pub const UV_ENOTEMPTY: i32 = @intFromEnum(SystemErrno.ENOTEMPTY); -pub const UV_ENOTSOCK: i32 = @intFromEnum(SystemErrno.ENOTSOCK); -pub const UV_ENOTSUP: i32 = @intFromEnum(SystemErrno.ENOTSUP); -pub const UV_EPERM: i32 = @intFromEnum(SystemErrno.EPERM); -pub const UV_EPIPE: i32 = @intFromEnum(SystemErrno.EPIPE); -pub const UV_EPROTO: i32 = @intFromEnum(SystemErrno.EPROTO); -pub const UV_EPROTONOSUPPORT: i32 = @intFromEnum(SystemErrno.EPROTONOSUPPORT); -pub const UV_EPROTOTYPE: i32 = @intFromEnum(SystemErrno.EPROTOTYPE); -pub const UV_EROFS: i32 = @intFromEnum(SystemErrno.EROFS); -pub const UV_ESHUTDOWN: i32 = 
@intFromEnum(SystemErrno.ESHUTDOWN); -pub const UV_ESPIPE: i32 = @intFromEnum(SystemErrno.ESPIPE); -pub const UV_ESRCH: i32 = @intFromEnum(SystemErrno.ESRCH); -pub const UV_ETIMEDOUT: i32 = @intFromEnum(SystemErrno.ETIMEDOUT); -pub const UV_ETXTBSY: i32 = @intFromEnum(SystemErrno.ETXTBSY); -pub const UV_EXDEV: i32 = @intFromEnum(SystemErrno.EXDEV); -pub const UV_EFBIG: i32 = @intFromEnum(SystemErrno.EFBIG); -pub const UV_ENOPROTOOPT: i32 = @intFromEnum(SystemErrno.ENOPROTOOPT); -pub const UV_ERANGE: i32 = @intFromEnum(SystemErrno.ERANGE); -pub const UV_ENXIO: i32 = @intFromEnum(SystemErrno.ENXIO); -pub const UV_EMLINK: i32 = @intFromEnum(SystemErrno.EMLINK); -pub const UV_EHOSTDOWN: i32 = @intFromEnum(SystemErrno.EHOSTDOWN); -pub const UV_EREMOTEIO: i32 = @intFromEnum(SystemErrno.EREMOTEIO); -pub const UV_ENOTTY: i32 = @intFromEnum(SystemErrno.ENOTTY); -pub const UV_EFTYPE: i32 = -bun.windows.libuv.UV_EFTYPE; -pub const UV_EILSEQ: i32 = @intFromEnum(SystemErrno.EILSEQ); -pub const UV_EOVERFLOW: i32 = @intFromEnum(SystemErrno.EOVERFLOW); -pub const UV_ESOCKTNOSUPPORT: i32 = @intFromEnum(SystemErrno.ESOCKTNOSUPPORT); -pub const UV_ENODATA: i32 = @intFromEnum(SystemErrno.ENODATA); -pub const UV_EUNATCH: i32 = @intFromEnum(SystemErrno.EUNATCH); - -pub const preallocate_length = 2048 * 1024; -pub fn preallocate_file(fd: std.posix.fd_t, offset: std.posix.off_t, len: std.posix.off_t) anyerror!void { - // https://gist.github.com/Jarred-Sumner/b37b93399b63cbfd86e908c59a0a37df - // ext4 NVME Linux kernel 5.17.0-1016-oem x86_64 - // - // hyperfine "./micro 1024 temp" "./micro 1024 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" - // Benchmark 1: ./micro 1024 temp - // Time (mean ± σ): 1.8 ms ± 0.2 ms [User: 0.6 ms, System: 0.1 ms] - // Range (min … max): 1.2 ms … 2.3 ms 67 runs - // Benchmark 2: ./micro 1024 temp --preallocate - // Time (mean ± σ): 1.8 ms ± 0.1 ms [User: 0.6 ms, System: 0.1 ms] - // Range (min … max): 1.4 ms … 2.2 ms 121 runs - // Summary - // './micro 1024 temp --preallocate' ran - // 1.01 ± 0.13 times faster than './micro 1024 temp' - - // hyperfine "./micro 65432 temp" "./micro 65432 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" - // Benchmark 1: ./micro 65432 temp - // Time (mean ± σ): 1.8 ms ± 0.2 ms [User: 0.7 ms, System: 0.1 ms] - // Range (min … max): 1.2 ms … 2.3 ms 94 runs - // Benchmark 2: ./micro 65432 temp --preallocate - // Time (mean ± σ): 2.0 ms ± 0.1 ms [User: 0.6 ms, System: 0.1 ms] - // Range (min … max): 1.7 ms … 2.3 ms 108 runs - // Summary - // './micro 65432 temp' ran - // 1.08 ± 0.12 times faster than './micro 65432 temp --preallocate' - - // hyperfine "./micro 654320 temp" "./micro 654320 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" - // Benchmark 1: ./micro 654320 temp - // Time (mean ± σ): 2.3 ms ± 0.2 ms [User: 0.9 ms, System: 0.3 ms] - // Range (min … max): 1.9 ms … 2.9 ms 96 runs - - // Benchmark 2: ./micro 654320 temp --preallocate - // Time (mean ± σ): 2.2 ms ± 0.1 ms [User: 0.9 ms, System: 0.2 ms] - // Range (min … max): 1.9 ms … 2.7 ms 115 runs - - // Warning: Command took less than 5 ms to complete. Results might be inaccurate. 
- - // Summary - // './micro 654320 temp --preallocate' ran - // 1.04 ± 0.10 times faster than './micro 654320 temp' - - // hyperfine "./micro 6543200 temp" "./micro 6543200 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" - // Benchmark 1: ./micro 6543200 temp - // Time (mean ± σ): 6.3 ms ± 0.4 ms [User: 0.4 ms, System: 4.9 ms] - // Range (min … max): 5.8 ms … 8.6 ms 84 runs - - // Benchmark 2: ./micro 6543200 temp --preallocate - // Time (mean ± σ): 5.5 ms ± 0.3 ms [User: 0.5 ms, System: 3.9 ms] - // Range (min … max): 5.1 ms … 7.1 ms 93 runs - - // Summary - // './micro 6543200 temp --preallocate' ran - // 1.14 ± 0.09 times faster than './micro 6543200 temp' - - // hyperfine "./micro 65432000 temp" "./micro 65432000 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" - // Benchmark 1: ./micro 65432000 temp - // Time (mean ± σ): 52.9 ms ± 0.4 ms [User: 3.1 ms, System: 48.7 ms] - // Range (min … max): 52.4 ms … 54.4 ms 36 runs - - // Benchmark 2: ./micro 65432000 temp --preallocate - // Time (mean ± σ): 44.6 ms ± 0.8 ms [User: 2.3 ms, System: 41.2 ms] - // Range (min … max): 44.0 ms … 47.3 ms 37 runs - - // Summary - // './micro 65432000 temp --preallocate' ran - // 1.19 ± 0.02 times faster than './micro 65432000 temp' - - // hyperfine "./micro 65432000 temp" "./micro 65432000 temp --preallocate" --prepare="rm -rf temp" - // Benchmark 1: ./micro 65432000 temp - // Time (mean ± σ): 51.7 ms ± 0.9 ms [User: 2.1 ms, System: 49.6 ms] - // Range (min … max): 50.7 ms … 54.1 ms 49 runs - - // Benchmark 2: ./micro 65432000 temp --preallocate - // Time (mean ± σ): 43.8 ms ± 2.3 ms [User: 2.2 ms, System: 41.4 ms] - // Range (min … max): 42.7 ms … 54.7 ms 56 runs - - // Summary - // './micro 65432000 temp --preallocate' ran - // 1.18 ± 0.06 times faster than './micro 65432000 temp' - // - _ = std.os.linux.fallocate(fd, 0, @as(i64, @intCast(offset)), len); -} - -/// splice() moves data between two file descriptors without copying -/// between kernel address space and user address space. It -/// transfers up to len bytes of data from the file descriptor fd_in -/// to the file descriptor fd_out, where one of the file descriptors -/// must refer to a pipe. 
-pub fn splice(fd_in: std.posix.fd_t, off_in: ?*i64, fd_out: std.posix.fd_t, off_out: ?*i64, len: usize, flags: u32) usize { - return std.os.linux.syscall6( - .splice, - @as(usize, @bitCast(@as(isize, fd_in))), - @intFromPtr(off_in), - @as(usize, @bitCast(@as(isize, fd_out))), - @intFromPtr(off_out), - len, - flags, - ); -} - -// System related -pub const struct_sysinfo = extern struct { - uptime: c_long align(8), - loads: [3]c_ulong, - totalram: c_ulong, - freeram: c_ulong, - sharedram: c_ulong, - bufferram: c_ulong, - totalswap: c_ulong, - freeswap: c_ulong, - procs: u16, - pad: u16, - totalhigh: c_ulong, - freehigh: c_ulong, - mem_unit: u32, - pub fn _f(self: anytype) @import("std").zig.c_translation.FlexibleArrayType(@TypeOf(self), u8) { - const Intermediate = @import("std").zig.c_translation.FlexibleArrayType(@TypeOf(self), u8); - const ReturnType = @import("std").zig.c_translation.FlexibleArrayType(@TypeOf(self), u8); - return @as(ReturnType, @ptrCast(@alignCast(@as(Intermediate, @ptrCast(self)) + 108))); - } -}; -pub extern fn sysinfo(__info: [*c]struct_sysinfo) c_int; - -pub fn getFreeMemory() u64 { - var info: struct_sysinfo = undefined; - if (sysinfo(&info) == @as(c_int, 0)) return @as(u64, @bitCast(info.freeram)) *% @as(c_ulong, @bitCast(@as(c_ulong, info.mem_unit))); - return 0; -} - -pub fn getTotalMemory() u64 { - var info: struct_sysinfo = undefined; - if (sysinfo(&info) == @as(c_int, 0)) return @as(u64, @bitCast(info.totalram)) *% @as(c_ulong, @bitCast(@as(c_ulong, info.mem_unit))); - return 0; -} - -pub fn getSystemUptime() u64 { - var info: struct_sysinfo = undefined; - if (sysinfo(&info) == @as(c_int, 0)) return @as(u64, @bitCast(info.uptime)); - return 0; -} - -pub fn getSystemLoadavg() [3]f64 { - var info: struct_sysinfo = undefined; - if (sysinfo(&info) == @as(c_int, 0)) { - return [3]f64{ - std.math.ceil((@as(f64, @floatFromInt(info.loads[0])) / 65536.0) * 100.0) / 100.0, - std.math.ceil((@as(f64, @floatFromInt(info.loads[1])) / 65536.0) * 100.0) / 100.0, - std.math.ceil((@as(f64, @floatFromInt(info.loads[2])) / 65536.0) * 100.0) / 100.0, - }; - } - return [3]f64{ 0, 0, 0 }; -} - -pub fn get_version(name_buffer: *[bun.HOST_NAME_MAX]u8) []const u8 { - const uts = std.posix.uname(); - const result = bun.sliceTo(&uts.version, 0); - bun.copy(u8, name_buffer, result); - - return name_buffer[0..result.len]; -} - -pub fn get_release(name_buffer: *[bun.HOST_NAME_MAX]u8) []const u8 { - const uts = std.posix.uname(); - const result = bun.sliceTo(&uts.release, 0); - bun.copy(u8, name_buffer, result); - - return name_buffer[0..result.len]; -} - -// Taken from spawn.h header -pub const POSIX_SPAWN = struct { - pub const RESETIDS = 0x01; - pub const SETPGROUP = 0x02; - pub const SETSIGDEF = 0x04; - pub const SETSIGMASK = 0x08; - pub const SETSCHEDPARAM = 0x10; - pub const SETSCHEDULER = 0x20; - pub const USEVFORK = 0x40; - pub const SETSID = 0x80; -}; - -const fd_t = std.posix.fd_t; -const pid_t = std.posix.pid_t; -const mode_t = std.posix.mode_t; -const sigset_t = std.c.sigset_t; -const sched_param = std.posix.sched_param; - -pub const posix_spawnattr_t = extern struct { - __flags: c_short, - __pgrp: pid_t, - __sd: sigset_t, - __ss: sigset_t, - __sp: struct_sched_param, - __policy: c_int, - __pad: [16]c_int, -}; -pub const struct_sched_param = extern struct { - sched_priority: c_int, -}; -pub const struct___spawn_action = opaque {}; -pub const posix_spawn_file_actions_t = extern struct { - __allocated: c_int, - __used: c_int, - __actions: ?*struct___spawn_action, - __pad: 
[16]c_int, -}; - -pub extern "c" fn posix_spawn( - pid: *pid_t, - path: [*:0]const u8, - actions: ?*const posix_spawn_file_actions_t, - attr: ?*const posix_spawnattr_t, - argv: [*:null]?[*:0]const u8, - env: [*:null]?[*:0]const u8, -) c_int; -pub extern "c" fn posix_spawnp( - pid: *pid_t, - path: [*:0]const u8, - actions: ?*const posix_spawn_file_actions_t, - attr: ?*const posix_spawnattr_t, - argv: [*:null]?[*:0]const u8, - env: [*:null]?[*:0]const u8, -) c_int; -pub extern fn posix_spawnattr_init(__attr: *posix_spawnattr_t) c_int; -pub extern fn posix_spawnattr_destroy(__attr: *posix_spawnattr_t) c_int; -pub extern fn posix_spawnattr_getsigdefault(noalias __attr: [*c]const posix_spawnattr_t, noalias __sigdefault: [*c]sigset_t) c_int; -pub extern fn posix_spawnattr_setsigdefault(noalias __attr: [*c]posix_spawnattr_t, noalias __sigdefault: [*c]const sigset_t) c_int; -pub extern fn posix_spawnattr_getsigmask(noalias __attr: [*c]const posix_spawnattr_t, noalias __sigmask: [*c]sigset_t) c_int; -pub extern fn posix_spawnattr_setsigmask(noalias __attr: [*c]posix_spawnattr_t, noalias __sigmask: [*c]const sigset_t) c_int; -pub extern fn posix_spawnattr_getflags(noalias __attr: [*c]const posix_spawnattr_t, noalias __flags: [*c]c_short) c_int; -pub extern fn posix_spawnattr_setflags(_attr: [*c]posix_spawnattr_t, __flags: c_short) c_int; -pub extern fn posix_spawnattr_getpgroup(noalias __attr: [*c]const posix_spawnattr_t, noalias __pgroup: [*c]pid_t) c_int; -pub extern fn posix_spawnattr_setpgroup(__attr: [*c]posix_spawnattr_t, __pgroup: pid_t) c_int; -pub extern fn posix_spawnattr_getschedpolicy(noalias __attr: [*c]const posix_spawnattr_t, noalias __schedpolicy: [*c]c_int) c_int; -pub extern fn posix_spawnattr_setschedpolicy(__attr: [*c]posix_spawnattr_t, __schedpolicy: c_int) c_int; -pub extern fn posix_spawnattr_getschedparam(noalias __attr: [*c]const posix_spawnattr_t, noalias __schedparam: [*c]struct_sched_param) c_int; -pub extern fn posix_spawnattr_setschedparam(noalias __attr: [*c]posix_spawnattr_t, noalias __schedparam: [*c]const struct_sched_param) c_int; -pub extern fn posix_spawn_file_actions_init(__file_actions: *posix_spawn_file_actions_t) c_int; -pub extern fn posix_spawn_file_actions_destroy(__file_actions: *posix_spawn_file_actions_t) c_int; -pub extern fn posix_spawn_file_actions_addopen(noalias __file_actions: *posix_spawn_file_actions_t, __fd: c_int, noalias __path: [*:0]const u8, __oflag: c_int, __mode: mode_t) c_int; -pub extern fn posix_spawn_file_actions_addclose(__file_actions: *posix_spawn_file_actions_t, __fd: c_int) c_int; -pub extern fn posix_spawn_file_actions_adddup2(__file_actions: *posix_spawn_file_actions_t, __fd: c_int, __newfd: c_int) c_int; -pub const POSIX_SPAWN_RESETIDS = @as(c_int, 0x01); -pub const POSIX_SPAWN_SETPGROUP = @as(c_int, 0x02); -pub const POSIX_SPAWN_SETSIGDEF = @as(c_int, 0x04); -pub const POSIX_SPAWN_SETSIGMASK = @as(c_int, 0x08); -pub const POSIX_SPAWN_SETSCHEDPARAM = @as(c_int, 0x10); -pub const POSIX_SPAWN_SETSCHEDULER = @as(c_int, 0x20); -pub const POSIX_SPAWN_SETSID = @as(c_int, 0x80); - -const posix_spawn_file_actions_addfchdir_np_type = *const fn (actions: *posix_spawn_file_actions_t, filedes: fd_t) c_int; -const posix_spawn_file_actions_addchdir_np_type = *const fn (actions: *posix_spawn_file_actions_t, path: [*:0]const u8) c_int; - -/// When not available, these functions will return 0. 
-pub fn posix_spawn_file_actions_addfchdir_np(actions: *posix_spawn_file_actions_t, filedes: std.posix.fd_t) c_int { - const function = bun.C.dlsym(posix_spawn_file_actions_addfchdir_np_type, "posix_spawn_file_actions_addfchdir_np") orelse - return 0; - return function(actions, filedes); -} - -/// When not available, these functions will return 0. -pub fn posix_spawn_file_actions_addchdir_np(actions: *posix_spawn_file_actions_t, path: [*:0]const u8) c_int { - const function = bun.C.dlsym(posix_spawn_file_actions_addchdir_np_type, "posix_spawn_file_actions_addchdir_np") orelse - return 0; - return function(actions, path); -} - -pub extern fn vmsplice(fd: c_int, iovec: [*]const std.posix.iovec, iovec_count: usize, flags: u32) isize; - -pub const FD_CLOEXEC = bun.c.FD_CLOEXEC; -pub const freeifaddrs = bun.c.freeifaddrs; -pub const getifaddrs = bun.c.getifaddrs; -pub const ifaddrs = bun.c.ifaddrs; -pub const IFF_LOOPBACK = bun.c.IFF_LOOPBACK; -pub const IFF_RUNNING = bun.c.IFF_RUNNING; -pub const IFF_UP = bun.c.IFF_UP; -pub const MSG_DONTWAIT = bun.c.MSG_DONTWAIT; -pub const MSG_NOSIGNAL = bun.c.MSG_NOSIGNAL; - -pub const F = struct { - pub const DUPFD_CLOEXEC = bun.c.F_DUPFD_CLOEXEC; - pub const DUPFD = bun.c.F_DUPFD; -}; - -pub const Mode = u32; -pub const E = std.posix.E; -pub const S = std.posix.S; - -pub extern "c" fn umask(Mode) Mode; - -pub fn getErrno(rc: anytype) E { - const Type = @TypeOf(rc); - - return switch (Type) { - // raw system calls from std.os.linux.* will return usize - // the errno is stored in this value - usize => { - const signed: isize = @bitCast(rc); - const int = if (signed > -4096 and signed < 0) -signed else 0; - return @enumFromInt(int); - }, - - // glibc system call wrapper returns i32/int - // the errno is stored in a thread local variable - // - // TODO: the inclusion of 'u32' and 'isize' seems suspicious - i32, c_int, u32, isize, i64 => if (rc == -1) - @enumFromInt(std.c._errno().*) - else - .SUCCESS, - - else => @compileError("Not implemented yet for type " ++ @typeName(Type)), - }; -} - -pub const getuid = std.os.linux.getuid; -pub const getgid = std.os.linux.getgid; -pub const linux_fs = bun.c; - -/// https://man7.org/linux/man-pages/man2/ioctl_ficlone.2.html -/// -/// Support for FICLONE is dependent on the filesystem driver. 
-pub fn ioctl_ficlone(dest_fd: bun.FileDescriptor, srcfd: bun.FileDescriptor) usize { - return std.os.linux.ioctl(dest_fd.cast(), bun.c.FICLONE, @intCast(srcfd.native())); -} - -pub const RWFFlagSupport = enum(u8) { - unknown = 0, - unsupported = 2, - supported = 1, - - var rwf_bool = std.atomic.Value(RWFFlagSupport).init(RWFFlagSupport.unknown); - - pub fn isLinuxKernelVersionWithBuggyRWF_NONBLOCK() bool { - return bun.linuxKernelVersion().major == 5 and switch (bun.linuxKernelVersion().minor) { - 9, 10 => true, - else => false, - }; - } - - pub fn disable() void { - rwf_bool.store(.unsupported, .monotonic); - } - - /// Workaround for https://github.com/google/gvisor/issues/2601 - pub fn isMaybeSupported() bool { - if (comptime !bun.Environment.isLinux) return false; - switch (rwf_bool.load(.monotonic)) { - .unknown => { - if (isLinuxKernelVersionWithBuggyRWF_NONBLOCK() or bun.getRuntimeFeatureFlag("BUN_FEATURE_FLAG_DISABLE_RWF_NONBLOCK")) { - rwf_bool.store(.unsupported, .monotonic); - return false; - } - - rwf_bool.store(.supported, .monotonic); - return true; - }, - .supported => { - return true; - }, - else => { - return false; - }, - } - - unreachable; - } -}; - -pub extern "c" fn sys_preadv2( - fd: c_int, - iov: [*]const std.posix.iovec, - iovcnt: c_int, - offset: std.posix.off_t, - flags: c_uint, -) isize; - -pub extern "c" fn sys_pwritev2( - fd: c_int, - iov: [*]const std.posix.iovec_const, - iovcnt: c_int, - offset: std.posix.off_t, - flags: c_uint, -) isize; - -// #define RENAME_NOREPLACE (1 << 0) /* Don't overwrite target */ -// #define RENAME_EXCHANGE (1 << 1) /* Exchange source and dest */ -// #define RENAME_WHITEOUT (1 << 2) /* Whiteout source */ - -pub const RENAME_NOREPLACE = 1 << 0; -pub const RENAME_EXCHANGE = 1 << 1; -pub const RENAME_WHITEOUT = 1 << 2; - -pub extern "c" fn quick_exit(code: c_int) noreturn; -pub extern "c" fn memrchr(ptr: [*]const u8, val: c_int, len: usize) ?[*]const u8; - -export fn sys_epoll_pwait2(epfd: i32, events: ?[*]std.os.linux.epoll_event, maxevents: i32, timeout: ?*const std.os.linux.timespec, sigmask: ?*const std.os.linux.sigset_t) isize { - return @bitCast( - std.os.linux.syscall6( - .epoll_pwait2, - @bitCast(@as(isize, @intCast(epfd))), - @intFromPtr(events), - @bitCast(@as(isize, @intCast(maxevents))), - @intFromPtr(timeout), - @intFromPtr(sigmask), - // This is the correct value. glibc claims to pass `sizeof sigset_t` for this argument, - // which would be 128, but they actually pass 8 which is what the kernel expects. 
- // https://github.com/ziglang/zig/issues/12715 - 8, - ), - ); -} - -// ********************************************************************************* -// libc overrides -// ********************************************************************************* - -fn simulateLibcErrno(rc: usize) c_int { - const signed: isize = @bitCast(rc); - const int: c_int = @intCast(if (signed > -4096 and signed < 0) -signed else 0); - std.c._errno().* = int; - return if (signed > -4096 and signed < 0) -1 else int; -} - -pub export fn stat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { - // https://git.musl-libc.org/cgit/musl/tree/src/stat/stat.c - const rc = std.os.linux.fstatat(std.os.linux.AT.FDCWD, path, buf, 0); - return simulateLibcErrno(rc); -} - -pub const stat64 = stat; -pub const lstat64 = lstat; -pub const fstat64 = fstat; -pub const fstatat64 = fstatat; - -pub export fn lstat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { - // https://git.musl-libc.org/cgit/musl/tree/src/stat/lstat.c - const rc = std.os.linux.fstatat(std.os.linux.AT.FDCWD, path, buf, std.os.linux.AT.SYMLINK_NOFOLLOW); - return simulateLibcErrno(rc); -} - -pub export fn fstat(fd: c_int, buf: *std.os.linux.Stat) c_int { - const rc = std.os.linux.fstat(fd, buf); - return simulateLibcErrno(rc); -} - -pub export fn fstatat(dirfd: i32, path: [*:0]const u8, buf: *std.os.linux.Stat, flags: u32) c_int { - const rc = std.os.linux.fstatat(dirfd, path, buf, flags); - return simulateLibcErrno(rc); -} - -pub export fn statx(dirfd: i32, path: [*:0]const u8, flags: u32, mask: u32, buf: *std.os.linux.Statx) c_int { - const rc = std.os.linux.statx(dirfd, path, flags, mask, buf); - return simulateLibcErrno(rc); -} - -comptime { - _ = stat; - _ = stat64; - _ = lstat; - _ = lstat64; - _ = fstat; - _ = fstat64; - _ = fstatat; - _ = statx; - @export(&stat, .{ .name = "stat64" }); - @export(&lstat, .{ .name = "lstat64" }); - @export(&fstat, .{ .name = "fstat64" }); - @export(&fstatat, .{ .name = "fstatat64" }); -} - -// ********************************************************************************* diff --git a/src/logger.zig b/src/logger.zig index e9315e5d04..fe16e4b53b 100644 --- a/src/logger.zig +++ b/src/logger.zig @@ -11,7 +11,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSC = bun.JSC; const fs = @import("fs.zig"); const unicode = std.unicode; @@ -448,8 +448,8 @@ pub const Msg = struct { pub fn toJS(this: Msg, globalObject: *bun.JSC.JSGlobalObject, allocator: std.mem.Allocator) JSC.JSValue { return switch (this.metadata) { - .build => JSC.BuildMessage.create(globalObject, allocator, this), - .resolve => JSC.ResolveMessage.create(globalObject, allocator, this, ""), + .build => bun.api.BuildMessage.create(globalObject, allocator, this), + .resolve => bun.api.ResolveMessage.create(globalObject, allocator, this, ""), }; } @@ -758,15 +758,15 @@ pub const Log = struct { 1 => { const msg = msgs[0]; return switch (msg.metadata) { - .build => JSC.BuildMessage.create(global, allocator, msg), - .resolve => JSC.ResolveMessage.create(global, allocator, msg, ""), + .build => bun.api.BuildMessage.create(global, allocator, msg), + .resolve => bun.api.ResolveMessage.create(global, allocator, msg, ""), }; }, else => { for (msgs[0..count], 0..) 
|msg, i| { errors_stack[i] = switch (msg.metadata) { - .build => JSC.BuildMessage.create(global, allocator, msg), - .resolve => JSC.ResolveMessage.create(global, allocator, msg, ""), + .build => bun.api.BuildMessage.create(global, allocator, msg), + .resolve => bun.api.ResolveMessage.create(global, allocator, msg, ""), }; } const out = JSC.ZigString.init(message); diff --git a/src/macho.zig b/src/macho.zig index f147dfe72f..00120d5855 100644 --- a/src/macho.zig +++ b/src/macho.zig @@ -171,7 +171,7 @@ pub const MachoFile = struct { // We need to shift [...data after __BUN] forward by size_diff bytes. const after_bun_slice = self.data.items[original_data_end + @as(usize, @intCast(size_diff)) ..]; const prev_after_bun_slice = prev_data_slice[original_segsize..]; - bun.C.move(after_bun_slice, prev_after_bun_slice); + bun.move(after_bun_slice, prev_after_bun_slice); // Now we copy the u32 size header std.mem.writeInt(u32, self.data.items[original_fileoff..][0..4], @intCast(data.len), .little); diff --git a/src/main.zig b/src/main.zig index 74813f5533..2988946c71 100644 --- a/src/main.zig +++ b/src/main.zig @@ -81,6 +81,6 @@ pub fn copyBackwards(comptime T: type, dest: []T, source: []const T) void { bun.copy(T, dest[0..source.len], source); } pub fn eqlBytes(src: []const u8, dest: []const u8) bool { - return bun.C.memcmp(src.ptr, dest.ptr, src.len) == 0; + return bun.c.memcmp(src.ptr, dest.ptr, src.len) == 0; } // -- End Zig Standard Library Additions -- diff --git a/src/napi/napi.zig b/src/napi/napi.zig index 897e19f623..504bbef5a8 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -81,8 +81,8 @@ pub const napi_ref = *Ref; pub const NapiHandleScope = opaque { pub extern fn NapiHandleScope__open(env: *NapiEnv, escapable: bool) ?*NapiHandleScope; pub extern fn NapiHandleScope__close(env: *NapiEnv, current: ?*NapiHandleScope) void; - extern fn NapiHandleScope__append(env: *NapiEnv, value: JSC.JSValueReprInt) void; - extern fn NapiHandleScope__escape(handleScope: *NapiHandleScope, value: JSC.JSValueReprInt) bool; + extern fn NapiHandleScope__append(env: *NapiEnv, value: JSC.JSValue.backing_int) void; + extern fn NapiHandleScope__escape(handleScope: *NapiHandleScope, value: JSC.JSValue.backing_int) bool; /// Create a new handle scope in the given environment, or return null if creating one now is /// unsafe (i.e. 
inside a finalizer) @@ -1457,7 +1457,7 @@ pub const ThreadSafeFunction = struct { lock: std.Thread.Mutex = .{}, event_loop: *JSC.EventLoop, - tracker: JSC.AsyncTaskTracker, + tracker: JSC.Debugger.AsyncTaskTracker, env: *NapiEnv, @@ -1780,7 +1780,7 @@ pub export fn napi_create_threadsafe_function( .queue = ThreadSafeFunction.Queue.init(max_queue_size, bun.default_allocator), .thread_count = .{ .raw = @intCast(initial_thread_count) }, .poll_ref = Async.KeepAlive.init(), - .tracker = JSC.AsyncTaskTracker.init(vm), + .tracker = JSC.Debugger.AsyncTaskTracker.init(vm), }); function.finalizer = .{ .env = env, .data = thread_finalize_data, .fun = thread_finalize_cb }; diff --git a/src/open.zig b/src/open.zig index 250e187b72..ca21b28110 100644 --- a/src/open.zig +++ b/src/open.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const DotEnv = @import("env_loader.zig"); diff --git a/src/options.zig b/src/options.zig index 0a508aed16..2c76040670 100644 --- a/src/options.zig +++ b/src/options.zig @@ -20,7 +20,7 @@ const MutableString = bun.MutableString; const FileDescriptorType = bun.FileDescriptor; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const StoredFileDescriptorType = bun.StoredFileDescriptorType; const JSC = bun.JSC; const Runtime = @import("./runtime.zig").Runtime; @@ -90,7 +90,7 @@ pub const ExternalModules = struct { }; pub fn isNodeBuiltin(str: string) bool { - return bun.JSC.HardcodedModule.Alias.has(str, .node); + return bun.JSC.ModuleLoader.HardcodedModule.Alias.has(str, .node); } const default_wildcard_patterns = &[_]WildcardPattern{ diff --git a/src/output.zig b/src/output.zig index 40151ee6be..59856ccfe3 100644 --- a/src/output.zig +++ b/src/output.zig @@ -246,8 +246,9 @@ pub const Source = struct { return bun_is_stdio_null[0] == 1; } + pub extern "c" fn bun_initialize_process() void; pub fn init() void { - bun.C.bun_initialize_process(); + bun_initialize_process(); if (Environment.isWindows) { WindowsStdio.init(); @@ -264,11 +265,12 @@ pub const Source = struct { } } + pub extern "c" fn bun_restore_stdio() void; pub fn restore() void { if (Environment.isWindows) { WindowsStdio.restore(); } else { - bun.C.bun_restore_stdio(); + bun_restore_stdio(); } } }; @@ -1155,7 +1157,7 @@ pub inline fn err(error_name: anytype, comptime fmt: []const u8, args: anytype) // enums if (info == .@"enum") { - const errno: bun.C.SystemErrno = @enumFromInt(@intFromEnum(info)); + const errno: bun.sys.SystemErrno = @enumFromInt(@intFromEnum(info)); break :display_name .{ @tagName(errno), false }; } diff --git a/src/patch.zig b/src/patch.zig index a5c9263f02..f6c6116c03 100644 --- a/src/patch.zig +++ b/src/patch.zig @@ -85,7 +85,7 @@ pub const PatchFile = struct { abs_patch_dir, todir, }, .auto); - var nodefs = bun.JSC.Node.NodeFS{}; + var nodefs = bun.api.node.fs.NodeFS{}; if (nodefs.mkdirRecursive(.{ .path = .{ .string = bun.PathString.init(path_to_make) }, .recursive = true, @@ -102,7 +102,7 @@ pub const PatchFile = struct { const filedir = bun.path.dirname(filepath.slice(), .auto); const mode = part.file_creation.mode; - var nodefs = bun.JSC.Node.NodeFS{}; + var nodefs = bun.api.node.fs.NodeFS{}; if (filedir.len > 0) { if (nodefs.mkdirRecursive(.{ .path = .{ .string = bun.PathString.init(filedir) }, @@ -1078,7 +1078,7 @@ const PatchLinesParser = struct { pub const TestingAPIs = struct { 
pub fn makeDiff(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments_ = callframe.arguments_old(2); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const old_folder_jsval = arguments.nextEat() orelse { return globalThis.throw("expected 2 strings", .{}); @@ -1141,7 +1141,7 @@ pub const TestingAPIs = struct { /// Used in JS tests, see `internal-for-testing.ts` and patch tests. pub fn parse(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments_ = callframe.arguments_old(2); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const patchfile_src_js = arguments.nextEat() orelse { return globalThis.throw("TestingAPIs.parse: expected at least 1 argument, got 0", .{}); @@ -1165,7 +1165,7 @@ pub const TestingAPIs = struct { pub fn parseApplyArgs(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSC.Node.Maybe(ApplyArgs, JSC.JSValue) { const arguments_ = callframe.arguments_old(2); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const patchfile_js = arguments.nextEat() orelse { globalThis.throw("apply: expected at least 1 argument, got 0", .{}) catch {}; diff --git a/src/perf.zig b/src/perf.zig index c717486f1f..218f115a83 100644 --- a/src/perf.zig +++ b/src/perf.zig @@ -97,7 +97,7 @@ pub fn trace(comptime name: [:0]const u8) Ctx { } pub const Darwin = struct { - const OSLog = bun.C.OSLog; + const OSLog = bun.darwin.OSLog; interval: OSLog.Signpost.Interval, pub fn init(comptime name: i32) @This() { diff --git a/src/ptr/tagged_pointer.zig b/src/ptr/tagged_pointer.zig index b573dfed6a..36a3443d20 100644 --- a/src/ptr/tagged_pointer.zig +++ b/src/ptr/tagged_pointer.zig @@ -6,7 +6,6 @@ const Global = bun.Global; const Environment = bun.Environment; const strings = bun.strings; const default_allocator = bun.default_allocator; -const C = bun.C; const AddressableSize = u49; diff --git a/src/renamer.zig b/src/renamer.zig index 628ac2d9dc..ff9d90e284 100644 --- a/src/renamer.zig +++ b/src/renamer.zig @@ -8,7 +8,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const Ref = @import("./ast/base.zig").Ref; const RefCtx = @import("./ast/base.zig").RefCtx; diff --git a/src/resolver/data_url.zig b/src/resolver/data_url.zig index f8b1116f86..881a467a46 100644 --- a/src/resolver/data_url.zig +++ b/src/resolver/data_url.zig @@ -7,7 +7,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; const std = @import("std"); const Allocator = std.mem.Allocator; diff --git a/src/resolver/dir_info.zig b/src/resolver/dir_info.zig index bffa2ff06e..fc71752f94 100644 --- a/src/resolver/dir_info.zig +++ b/src/resolver/dir_info.zig @@ -8,7 +8,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const StoredFileDescriptorType = bun.StoredFileDescriptorType; 
const FeatureFlags = bun.FeatureFlags; diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig index 4162430543..525bca08a1 100644 --- a/src/resolver/package_json.zig +++ b/src/resolver/package_json.zig @@ -8,7 +8,7 @@ const MutableString = bun.MutableString; const StoredFileDescriptorType = bun.StoredFileDescriptorType; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const Api = @import("../api/schema.zig").Api; const std = @import("std"); const options = @import("../options.zig"); diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index cdce9d1625..9a7574b07a 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -10,7 +10,7 @@ const PathString = bun.PathString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; const FD = bun.FD; -const C = bun.C; + const ast = @import("../import_record.zig"); const logger = bun.logger; const options = @import("../options.zig"); @@ -720,7 +720,7 @@ pub const Resolver = struct { if (r.opts.mark_builtins_as_external) { if (strings.hasPrefixComptime(import_path, "node:") or strings.hasPrefixComptime(import_path, "bun:") or - bun.JSC.HardcodedModule.Alias.has(import_path, r.opts.target)) + bun.jsc.ModuleLoader.HardcodedModule.Alias.has(import_path, r.opts.target)) { return .{ .success = Result{ @@ -1248,7 +1248,7 @@ pub const Resolver = struct { if (had_node_prefix) { // Module resolution fails automatically for unknown node builtins - if (!bun.JSC.HardcodedModule.Alias.has(import_path_without_node_prefix, .node)) { + if (!bun.JSC.ModuleLoader.HardcodedModule.Alias.has(import_path_without_node_prefix, .node)) { return .{ .not_found = {} }; } @@ -3166,7 +3166,7 @@ pub const Resolver = struct { // } // if (r.opts.mark_builtins_as_external or r.opts.target.isBun()) { - if (JSC.HardcodedModule.Alias.get(esm_resolution.path, r.opts.target)) |alias| { + if (JSC.ModuleLoader.HardcodedModule.Alias.get(esm_resolution.path, r.opts.target)) |alias| { return .{ .success = .{ .path_pair = .{ .primary = bun.fs.Path.init(alias.path) }, @@ -4356,7 +4356,7 @@ pub const GlobalCache = enum { comptime { _ = Resolver.Resolver__propForRequireMainPaths; - @export(&JSC.toJSHostFunction(Resolver.nodeModulePathsForJS), .{ .name = "Resolver__nodeModulePathsForJS" }); + @export(&JSC.toJSHostFn(Resolver.nodeModulePathsForJS), .{ .name = "Resolver__nodeModulePathsForJS" }); @export(&Resolver.nodeModulePathsJSValue, .{ .name = "Resolver__nodeModulePathsJSValue" }); } diff --git a/src/resolver/tsconfig_json.zig b/src/resolver/tsconfig_json.zig index d757fd9b0b..aba33f571d 100644 --- a/src/resolver/tsconfig_json.zig +++ b/src/resolver/tsconfig_json.zig @@ -7,7 +7,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const options = @import("../options.zig"); const logger = bun.logger; diff --git a/src/router.zig b/src/router.zig index e4968f7009..82aa4d2df8 100644 --- a/src/router.zig +++ b/src/router.zig @@ -18,7 +18,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const StoredFileDescriptorType = bun.StoredFileDescriptorType; const DirInfo = @import("./resolver/dir_info.zig"); const Fs = @import("./fs.zig"); diff --git a/src/runtime.zig b/src/runtime.zig index 6b14f42651..a03f6914c4 100644 --- 
a/src/runtime.zig +++ b/src/runtime.zig @@ -8,7 +8,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const std = @import("std"); const resolve_path = @import("./resolver/resolve_path.zig"); const Fs = @import("./fs.zig"); diff --git a/src/s3/client.zig b/src/s3/client.zig index cbf52173d7..e48a1f3b08 100644 --- a/src/s3/client.zig +++ b/src/s3/client.zig @@ -184,7 +184,7 @@ pub fn listObjects( search_params.deinitWithAllocator(bun.default_allocator); - const headers = JSC.WebCore.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); + const headers = bun.http.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); const task = bun.new(S3HttpSimpleTask, .{ .http = undefined, @@ -406,9 +406,9 @@ pub fn onUploadStreamRejectRequestStream(globalThis: *JSC.JSGlobalObject, callfr return .undefined; } comptime { - const jsonResolveRequestStream = JSC.toJSHostFunction(onUploadStreamResolveRequestStream); + const jsonResolveRequestStream = JSC.toJSHostFn(onUploadStreamResolveRequestStream); @export(&jsonResolveRequestStream, .{ .name = "Bun__S3UploadStream__onResolveRequestStream" }); - const jsonRejectRequestStream = JSC.toJSHostFunction(onUploadStreamRejectRequestStream); + const jsonRejectRequestStream = JSC.toJSHostFn(onUploadStreamRejectRequestStream); @export(&jsonRejectRequestStream, .{ .name = "Bun__S3UploadStream__onRejectRequestStream" }); } @@ -633,9 +633,9 @@ pub fn downloadStream( const headers = brk: { if (range) |range_| { const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ }); - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); + break :brk bun.http.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); } else { - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); + break :brk bun.http.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); } }; const proxy = proxy_url orelse ""; diff --git a/src/s3/download_stream.zig b/src/s3/download_stream.zig index 50e0789fde..bd8405f18f 100644 --- a/src/s3/download_stream.zig +++ b/src/s3/download_stream.zig @@ -13,7 +13,7 @@ pub const S3HttpDownloadStreamingTask = struct { http: bun.http.AsyncHTTP, vm: *JSC.VirtualMachine, sign_result: SignResult, - headers: JSC.WebCore.Headers, + headers: bun.http.Headers, callback_context: *anyopaque, // this transfers ownership from the chunk callback: *const fn (chunk: bun.MutableString, has_more: bool, err: ?S3Error, *anyopaque) void, diff --git a/src/s3/simple_request.zig b/src/s3/simple_request.zig index 9610955b95..e65f7b7800 100644 --- a/src/s3/simple_request.zig +++ b/src/s3/simple_request.zig @@ -74,7 +74,7 @@ pub const S3HttpSimpleTask = struct { http: bun.http.AsyncHTTP, vm: *JSC.VirtualMachine, sign_result: SignResult, - headers: JSC.WebCore.Headers, + headers: bun.http.Headers, callback_context: *anyopaque, callback: Callback, response_buffer: bun.MutableString = .{ @@ -397,16 +397,16 @@ pub fn executeSimpleS3Request( var header_buffer: [10]picohttp.Header = undefined; if (options.range) |range_| { const _headers = result.mixWithHeader(&header_buffer, .{ .name = "range", .value = range_ }); - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, 
bun.default_allocator) catch bun.outOfMemory(); + break :brk bun.http.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); } else { if (options.content_type) |content_type| { if (content_type.len > 0) { const _headers = result.mixWithHeader(&header_buffer, .{ .name = "Content-Type", .value = content_type }); - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); + break :brk bun.http.Headers.fromPicoHttpHeaders(_headers, bun.default_allocator) catch bun.outOfMemory(); } } - break :brk JSC.WebCore.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); + break :brk bun.http.Headers.fromPicoHttpHeaders(result.headers(), bun.default_allocator) catch bun.outOfMemory(); } }; const task = S3HttpSimpleTask.new(.{ diff --git a/src/semver/ExternalString.zig b/src/semver/ExternalString.zig index 5f143456c1..98c2708245 100644 --- a/src/semver/ExternalString.zig +++ b/src/semver/ExternalString.zig @@ -56,7 +56,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSC = bun.JSC; const IdentityContext = bun.IdentityContext; const OOM = bun.OOM; diff --git a/src/semver/SemverObject.zig b/src/semver/SemverObject.zig index df73920dd5..b341b0d2b4 100644 --- a/src/semver/SemverObject.zig +++ b/src/semver/SemverObject.zig @@ -4,7 +4,7 @@ pub fn create(globalThis: *JSC.JSGlobalObject) JSC.JSValue { object.put( globalThis, JSC.ZigString.static("satisfies"), - JSC.NewFunction( + JSC.host_fn.NewFunction( globalThis, JSC.ZigString.static("satisfies"), 2, @@ -16,7 +16,7 @@ pub fn create(globalThis: *JSC.JSGlobalObject) JSC.JSValue { object.put( globalThis, JSC.ZigString.static("order"), - JSC.NewFunction( + JSC.host_fn.NewFunction( globalThis, JSC.ZigString.static("order"), 2, @@ -136,7 +136,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSC = bun.JSC; const IdentityContext = @import("../identity_context.zig").IdentityContext; const OOM = bun.OOM; diff --git a/src/semver/SemverQuery.zig b/src/semver/SemverQuery.zig index a5961f0b77..0e1a667763 100644 --- a/src/semver/SemverQuery.zig +++ b/src/semver/SemverQuery.zig @@ -787,7 +787,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSC = bun.JSC; const IdentityContext = @import("../identity_context.zig").IdentityContext; const OOM = bun.OOM; diff --git a/src/semver/SemverRange.zig b/src/semver/SemverRange.zig index b766e69fef..a8748369b9 100644 --- a/src/semver/SemverRange.zig +++ b/src/semver/SemverRange.zig @@ -259,7 +259,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSC = bun.JSC; const IdentityContext = @import("../identity_context.zig").IdentityContext; const OOM = bun.OOM; diff --git a/src/semver/SemverString.zig b/src/semver/SemverString.zig index 0bb43c9e0e..3937fa0b36 100644 --- a/src/semver/SemverString.zig +++ b/src/semver/SemverString.zig @@ -618,7 +618,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + 
const JSC = bun.JSC; const IdentityContext = bun.IdentityContext; const OOM = bun.OOM; diff --git a/src/semver/SlicedString.zig b/src/semver/SlicedString.zig index 7cc66a91d2..9471d173e3 100644 --- a/src/semver/SlicedString.zig +++ b/src/semver/SlicedString.zig @@ -48,7 +48,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSC = bun.JSC; const IdentityContext = bun.IdentityContext; const OOM = bun.OOM; diff --git a/src/semver/Version.zig b/src/semver/Version.zig index a5725f4659..cbc0d733e8 100644 --- a/src/semver/Version.zig +++ b/src/semver/Version.zig @@ -996,7 +996,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSC = bun.JSC; const IdentityContext = @import("../identity_context.zig").IdentityContext; const OOM = bun.OOM; diff --git a/src/shell/Builtin.zig b/src/shell/Builtin.zig index f0caaf533d..7c7b44157c 100644 --- a/src/shell/Builtin.zig +++ b/src/shell/Builtin.zig @@ -227,7 +227,7 @@ pub const BuiltinIO = struct { pub const deref = RefCount.deref; ref_count: RefCount, - blob: bun.JSC.WebCore.Blob, + blob: bun.webcore.Blob, fn deinit(this: *Blob) void { this.blob.deinit(); @@ -505,7 +505,7 @@ pub inline fn parentCmdMut(this: *Builtin) *Cmd { pub fn done(this: *Builtin, exit_code: anytype) void { const code: ExitCode = switch (@TypeOf(exit_code)) { - bun.C.E => @intFromEnum(exit_code), + bun.sys.E => @intFromEnum(exit_code), u1, u8, u16 => exit_code, comptime_int => exit_code, else => @compileError("Invalid type: " ++ @typeName(@TypeOf(exit_code))), @@ -592,7 +592,7 @@ pub fn writeNoIO(this: *Builtin, comptime io_kind: @Type(.enum_literal), buf: [] }, .arraybuf => { if (io.arraybuf.i >= io.arraybuf.buf.array_buffer.byte_len) { - return Maybe(usize).initErr(Syscall.Error.fromCode(bun.C.E.NOSPC, .write)); + return Maybe(usize).initErr(bun.sys.Error.fromCode(bun.sys.E.NOSPC, .write)); } const len = buf.len; diff --git a/src/shell/ParsedShellScript.zig b/src/shell/ParsedShellScript.zig index 1bb37137e9..319171a0d1 100644 --- a/src/shell/ParsedShellScript.zig +++ b/src/shell/ParsedShellScript.zig @@ -48,7 +48,7 @@ pub fn finalize( pub fn setCwd(this: *ParsedShellScript, globalThis: *JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments_ = callframe.arguments_old(2); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const str_js = arguments.nextEat() orelse { return globalThis.throw("$`...`.cwd(): expected a string argument", .{}); }; @@ -177,6 +177,6 @@ const JSValue = JSC.JSValue; const JSGlobalObject = JSC.JSGlobalObject; const CallFrame = JSC.CallFrame; const Node = JSC.Node; -const ArgumentsSlice = JSC.Node.ArgumentsSlice; +const ArgumentsSlice = JSC.CallFrame.ArgumentsSlice; const assert = bun.assert; const log = bun.Output.scoped(.ParsedShellScript, true); diff --git a/src/shell/builtin/cd.zig b/src/shell/builtin/cd.zig index 6ae4448aac..88a5e7b8c1 100644 --- a/src/shell/builtin/cd.zig +++ b/src/shell/builtin/cd.zig @@ -65,7 +65,7 @@ fn handleChangeCwdErr(this: *Cd, err: Syscall.Error, new_cwd_: []const u8) Maybe const errno: usize = @intCast(err.errno); switch (errno) { - @as(usize, @intFromEnum(bun.C.E.NOTDIR)) => { + @as(usize, @intFromEnum(Syscall.E.NOTDIR)) => { 
if (this.bltn().stderr.needsIO() == null) { const buf = this.bltn().fmtErrorArena(.cd, "not a directory: {s}\n", .{new_cwd_}); _ = this.bltn().writeNoIO(.stderr, buf); @@ -78,7 +78,7 @@ fn handleChangeCwdErr(this: *Cd, err: Syscall.Error, new_cwd_: []const u8) Maybe this.writeStderrNonBlocking("not a directory: {s}\n", .{new_cwd_}); return Maybe(void).success; }, - @as(usize, @intFromEnum(bun.C.E.NOENT)) => { + @as(usize, @intFromEnum(Syscall.E.NOENT)) => { if (this.bltn().stderr.needsIO() == null) { const buf = this.bltn().fmtErrorArena(.cd, "not a directory: {s}\n", .{new_cwd_}); _ = this.bltn().writeNoIO(.stderr, buf); diff --git a/src/shell/builtin/cp.zig b/src/shell/builtin/cp.zig index 736947b73c..9816fabdfa 100644 --- a/src/shell/builtin/cp.zig +++ b/src/shell/builtin/cp.zig @@ -371,7 +371,7 @@ pub const ShellCpTask = struct { return out; } - pub fn ensureDest(nodefs: *JSC.Node.NodeFS, dest: bun.OSPathSliceZ) Maybe(void) { + pub fn ensureDest(nodefs: *JSC.Node.fs.NodeFS, dest: bun.OSPathSliceZ) Maybe(void) { return switch (nodefs.mkdirRecursiveOSPath(dest, JSC.Node.Arguments.Mkdir.DefaultMode, false)) { .err => |err| Maybe(void){ .err = err }, .result => Maybe(void).success, @@ -392,7 +392,7 @@ pub const ShellCpTask = struct { if (bun.Environment.isWindows) { const attributes = bun.sys.getFileAttributes(path[0..path.len]) orelse { const err: Syscall.Error = .{ - .errno = @intFromEnum(bun.C.SystemErrno.ENOENT), + .errno = @intFromEnum(bun.sys.SystemErrno.ENOENT), .syscall = .copyfile, .path = path, }; @@ -487,7 +487,7 @@ pub const ShellCpTask = struct { const tgt_is_dir: bool, const tgt_exists: bool = switch (this.isDir(tgt)) { .result => |is_dir| .{ is_dir, true }, .err => |e| brk: { - if (e.getErrno() == bun.C.E.NOENT) { + if (e.getErrno() == .NOENT) { // If it has a trailing directory separator, its a directory const is_dir = hasTrailingSep(tgt); break :brk .{ is_dir, false }; @@ -539,7 +539,7 @@ pub const ShellCpTask = struct { this.src_absolute = bun.default_allocator.dupeZ(u8, src[0..src.len]) catch bun.outOfMemory(); this.tgt_absolute = bun.default_allocator.dupeZ(u8, tgt[0..tgt.len]) catch bun.outOfMemory(); - const args = JSC.Node.Arguments.Cp{ + const args = JSC.Node.fs.Arguments.Cp{ .src = JSC.Node.PathLike{ .string = bun.PathString.init(this.src_absolute.?) }, .dest = JSC.Node.PathLike{ .string = bun.PathString.init(this.tgt_absolute.?) 
}, .flags = .{ @@ -555,7 +555,7 @@ pub const ShellCpTask = struct { if (this.event_loop == .js) { const vm: *JSC.VirtualMachine = this.event_loop.js.getVmImpl(); debug("Yoops", .{}); - _ = JSC.Node.ShellAsyncCpTask.createWithShellTask( + _ = bun.api.node.fs.ShellAsyncCpTask.createWithShellTask( vm.global, args, vm, @@ -564,7 +564,7 @@ pub const ShellCpTask = struct { false, ); } else { - _ = JSC.Node.ShellAsyncCpTask.createMini( + _ = bun.api.node.fs.ShellAsyncCpTask.createMini( args, this.event_loop.mini, bun.ArenaAllocator.init(bun.default_allocator), diff --git a/src/shell/builtin/ls.zig b/src/shell/builtin/ls.zig index 19b2c98278..a8b083dbc7 100644 --- a/src/shell/builtin/ls.zig +++ b/src/shell/builtin/ls.zig @@ -247,10 +247,10 @@ pub const ShellLsTask = struct { const fd = switch (ShellSyscall.openat(this.cwd, this.path, bun.O.RDONLY | bun.O.DIRECTORY, 0)) { .err => |e| { switch (e.getErrno()) { - bun.C.E.NOENT => { + .NOENT => { this.err = this.errorWithPath(e, this.path); }, - bun.C.E.NOTDIR => { + .NOTDIR => { this.result_kind = .file; this.addEntry(this.path); }, diff --git a/src/shell/builtin/mkdir.zig b/src/shell/builtin/mkdir.zig index d1095fe341..9f5b8cb9c0 100644 --- a/src/shell/builtin/mkdir.zig +++ b/src/shell/builtin/mkdir.zig @@ -249,10 +249,10 @@ pub const ShellMkdirTask = struct { break :brk ResolvePath.joinZ(parts, .auto); }; - var node_fs = JSC.Node.NodeFS{}; + var node_fs = JSC.Node.fs.NodeFS{}; // Recursive if (this.opts.parents) { - const args = JSC.Node.Arguments.Mkdir{ + const args = JSC.Node.fs.Arguments.Mkdir{ .path = JSC.Node.PathLike{ .string = bun.PathString.init(filepath) }, .recursive = true, .always_return_none = true, @@ -268,7 +268,7 @@ pub const ShellMkdirTask = struct { }, } } else { - const args = JSC.Node.Arguments.Mkdir{ + const args = JSC.Node.fs.Arguments.Mkdir{ .path = JSC.Node.PathLike{ .string = bun.PathString.init(filepath) }, .recursive = false, .always_return_none = true, diff --git a/src/shell/builtin/mv.zig b/src/shell/builtin/mv.zig index ab7d001445..03b9a9b220 100644 --- a/src/shell/builtin/mv.zig +++ b/src/shell/builtin/mv.zig @@ -40,7 +40,7 @@ pub const ShellMvCheckTargetTask = struct { const fd = switch (ShellSyscall.openat(this.cwd, this.target, bun.O.RDONLY | bun.O.DIRECTORY, 0)) { .err => |e| { switch (e.getErrno()) { - bun.C.E.NOTDIR => { + Syscall.E.NOTDIR => { this.result = .{ .result = null }; }, else => { @@ -105,7 +105,7 @@ pub const ShellMvBatchedTask = struct { pub fn moveInDir(this: *@This(), src: [:0]const u8, buf: *bun.PathBuffer) bool { const path_in_dir_ = bun.path.normalizeBuf(ResolvePath.basename(src), buf, .auto); if (path_in_dir_.len + 1 >= buf.len) { - this.err = Syscall.Error.fromCode(bun.C.E.NAMETOOLONG, .rename); + this.err = Syscall.Error.fromCode(Syscall.E.NAMETOOLONG, .rename); return false; } buf[path_in_dir_.len] = 0; @@ -223,7 +223,7 @@ pub fn next(this: *Mv) Maybe(void) { const maybe_fd: ?bun.FileDescriptor = switch (check_target.task.result.?) 
{ .err => |e| brk: { switch (e.getErrno()) { - bun.C.E.NOENT => { + Syscall.E.NOENT => { // Means we are renaming entry, not moving to a directory if (this.args.sources.len == 1) break :brk null; diff --git a/src/shell/builtin/rm.zig b/src/shell/builtin/rm.zig index cf14aff17f..26b3c1fd2b 100644 --- a/src/shell/builtin/rm.zig +++ b/src/shell/builtin/rm.zig @@ -817,11 +817,11 @@ pub const ShellRmTask = struct { .result => return Maybe(void).success, .err => |e| { switch (e.getErrno()) { - bun.C.E.NOENT => { + .NOENT => { if (this.opts.force) return this.verboseDeleted(dir_task, path); return .{ .err = this.errorWithPath(e, path) }; }, - bun.C.E.NOTDIR => { + .NOTDIR => { delete_state.treat_as_dir = false; if (this.removeEntryFile(dir_task, dir_task.path, is_absolute, buf, &delete_state).asErr()) |err| { return .{ .err = this.errorWithPath(err, path) }; @@ -837,7 +837,7 @@ pub const ShellRmTask = struct { } if (!this.opts.recursive) { - return Maybe(void).initErr(Syscall.Error.fromCode(bun.C.E.ISDIR, .TODO).withPath(bun.default_allocator.dupeZ(u8, dir_task.path) catch bun.outOfMemory())); + return Maybe(void).initErr(Syscall.Error.fromCode(bun.sys.E.ISDIR, .TODO).withPath(bun.default_allocator.dupeZ(u8, dir_task.path) catch bun.outOfMemory())); } const flags = bun.O.DIRECTORY | bun.O.RDONLY; @@ -845,11 +845,11 @@ pub const ShellRmTask = struct { .result => |fd| fd, .err => |e| { switch (e.getErrno()) { - bun.C.E.NOENT => { + .NOENT => { if (this.opts.force) return this.verboseDeleted(dir_task, path); return .{ .err = this.errorWithPath(e, path) }; }, - bun.C.E.NOTDIR => { + .NOTDIR => { return this.removeEntryFile(dir_task, dir_task.path, is_absolute, buf, &DummyRemoveFile.dummy); }, else => return .{ .err = this.errorWithPath(e, path) }, @@ -941,7 +941,7 @@ pub const ShellRmTask = struct { }, .err => |e| { switch (e.getErrno()) { - bun.C.E.NOENT => { + .NOENT => { if (this.opts.force) { switch (this.verboseDeleted(dir_task, path)) { .err => |e2| return .{ .err = e2 }, @@ -1046,14 +1046,14 @@ pub const ShellRmTask = struct { }, .err => |e| { switch (e.getErrno()) { - bun.C.E.NOENT => { + .NOENT => { if (this.opts.force) { _ = this.verboseDeleted(dir_task, dir_task.path); return .{ .result = true }; } return .{ .err = this.errorWithPath(e, dir_task.path) }; }, - bun.C.E.NOTDIR => { + .NOTDIR => { state.treat_as_dir = false; continue; }, @@ -1096,17 +1096,17 @@ pub const ShellRmTask = struct { .err => |e| { debug("unlinkatWithFlags({s}) = {s}", .{ path, @tagName(e.getErrno()) }); switch (e.getErrno()) { - bun.C.E.NOENT => { + bun.sys.E.NOENT => { if (this.opts.force) return this.verboseDeleted(parent_dir_task, path); return .{ .err = this.errorWithPath(e, path) }; }, - bun.C.E.ISDIR => { + bun.sys.E.ISDIR => { return Handler.onIsDir(vtable, parent_dir_task, path, is_absolute, buf); }, // This might happen if the file is actually a directory - bun.C.E.PERM => { + bun.sys.E.PERM => { switch (builtin.os.tag) { // non-Linux POSIX systems and Windows return EPERM when trying to delete a directory, so // we need to handle that case specifically and translate the error @@ -1121,13 +1121,13 @@ pub const ShellRmTask = struct { .err => |e2| { return switch (e2.getErrno()) { // not empty, process directory as we would normally - bun.C.E.NOTEMPTY => { + .NOTEMPTY => { // this.enqueueNoJoin(parent_dir_task, path, .dir); // return Maybe(void).success; return Handler.onDirNotEmpty(vtable, parent_dir_task, path, is_absolute, buf); }, // actually a file, the error is a permissions error - bun.C.E.NOTDIR => .{ 
.err = this.errorWithPath(e, path) }, + .NOTDIR => .{ .err = this.errorWithPath(e, path) }, else => .{ .err = this.errorWithPath(e2, path) }, }; }, diff --git a/src/shell/builtin/touch.zig b/src/shell/builtin/touch.zig index 10180e8058..3b96ceab92 100644 --- a/src/shell/builtin/touch.zig +++ b/src/shell/builtin/touch.zig @@ -238,20 +238,20 @@ pub const ShellTouchTask = struct { break :brk ResolvePath.joinZ(parts, .auto); }; - var node_fs = JSC.Node.NodeFS{}; + var node_fs = JSC.Node.fs.NodeFS{}; const milliseconds: f64 = @floatFromInt(std.time.milliTimestamp()); const atime: JSC.Node.TimeLike = if (bun.Environment.isWindows) milliseconds / 1000.0 else JSC.Node.TimeLike{ .sec = @intFromFloat(@divFloor(milliseconds, std.time.ms_per_s)), .nsec = @intFromFloat(@mod(milliseconds, std.time.ms_per_s) * std.time.ns_per_ms), }; const mtime = atime; - const args = JSC.Node.Arguments.Utimes{ + const args = JSC.Node.fs.Arguments.Utimes{ .atime = atime, .mtime = mtime, .path = .{ .string = bun.PathString.init(filepath) }, }; if (node_fs.utimes(args, .sync).asErr()) |err| out: { - if (err.getErrno() == bun.C.E.NOENT) { + if (err.getErrno() == .NOENT) { const perm = 0o664; switch (Syscall.open(filepath, bun.O.CREAT | bun.O.WRONLY, perm)) { .result => |fd| { diff --git a/src/shell/interpreter.zig b/src/shell/interpreter.zig index 96067524b4..cb677823ee 100644 --- a/src/shell/interpreter.zig +++ b/src/shell/interpreter.zig @@ -691,7 +691,7 @@ pub const Interpreter = struct { pub fn createShellInterpreter(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSValue { const allocator = bun.default_allocator; const arguments_ = callframe.arguments_old(3); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const resolve = arguments.nextEat() orelse return globalThis.throw("shell: expected 3 arguments, got 0", .{}); @@ -1147,7 +1147,7 @@ pub const Interpreter = struct { fn ioToJSValue(globalThis: *JSGlobalObject, buf: *bun.ByteList) JSValue { const bytelist = buf.*; buf.* = .{}; - const buffer: JSC.Buffer = .{ + const buffer: JSC.Node.Buffer = .{ .allocator = bun.default_allocator, .buffer = JSC.ArrayBuffer.fromBytes(@constCast(bytelist.slice()), .Uint8Array), }; diff --git a/src/shell/shell.zig b/src/shell/shell.zig index eb89c72c2e..8fb1325114 100644 --- a/src/shell/shell.zig +++ b/src/shell/shell.zig @@ -4282,7 +4282,7 @@ pub const TestingAPIs = struct { if (comptime bun.Environment.isWindows) return JSValue.false; const arguments_ = callframe.arguments_old(1); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string = arguments.nextEat() orelse { return globalThis.throw("shellInternals.disabledOnPosix: expected 1 arguments, got 0", .{}); }; @@ -4305,7 +4305,7 @@ pub const TestingAPIs = struct { callframe: *JSC.CallFrame, ) bun.JSError!JSC.JSValue { const arguments_ = callframe.arguments_old(2); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string_args = arguments.nextEat() orelse { return globalThis.throw("shell_parse: expected 2 arguments, got 0", .{}); }; @@ -4373,7 +4373,7 @@ pub const TestingAPIs = struct { callframe: *JSC.CallFrame, ) bun.JSError!JSC.JSValue { const arguments_ = 
callframe.arguments_old(2); - var arguments = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); + var arguments = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments_.slice()); const string_args = arguments.nextEat() orelse { return globalThis.throw("shell_parse: expected 2 arguments, got 0", .{}); }; diff --git a/src/shell/subproc.zig b/src/shell/subproc.zig index 8f13ca2bb2..9f71903422 100644 --- a/src/shell/subproc.zig +++ b/src/shell/subproc.zig @@ -261,7 +261,7 @@ pub const ShellSubprocess = struct { subprocess.weak_file_sink_stdin_ptr = pipe; return pipe.toJSWithDestructor( globalThis, - JSC.WebCore.SinkDestructor.Ptr.init(subprocess), + JSC.WebCore.sink_destructor.Ptr.init(subprocess), ); } }, @@ -877,7 +877,7 @@ pub const ShellSubprocess = struct { subprocess.process.setExitHandler(subprocess); if (subprocess.stdin == .pipe) { - subprocess.stdin.pipe.signal = JSC.WebCore.Signal.init(&subprocess.stdin); + subprocess.stdin.pipe.signal = bun.webcore.streams.Signal.init(&subprocess.stdin); } if (comptime !is_sync) { diff --git a/src/sql/postgres.zig b/src/sql/postgres.zig index 9d3ff8548c..c325ea5858 100644 --- a/src/sql/postgres.zig +++ b/src/sql/postgres.zig @@ -249,7 +249,7 @@ pub const PostgresSQLContext = struct { } comptime { - const js_init = JSC.toJSHostFunction(init); + const js_init = JSC.toJSHostFn(init); @export(&js_init, .{ .name = "PostgresSQLContext__init" }); } }; @@ -561,7 +561,7 @@ pub const PostgresSQLQuery = struct { pub fn call(globalThis: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) bun.JSError!JSC.JSValue { const arguments = callframe.arguments_old(6).slice(); - var args = JSC.Node.ArgumentsSlice.init(globalThis.bunVM(), arguments); + var args = JSC.CallFrame.ArgumentsSlice.init(globalThis.bunVM(), arguments); defer args.deinit(); const query = args.nextEat() orelse { return globalThis.throw("query must be a string", .{}); @@ -833,7 +833,7 @@ pub const PostgresSQLQuery = struct { } comptime { - const jscall = JSC.toJSHostFunction(call); + const jscall = JSC.toJSHostFn(call); @export(&jscall, .{ .name = "PostgresSQLQuery__createInstance" }); } }; @@ -1208,7 +1208,7 @@ pub const PostgresSQLConnection = struct { /// Before being connected, this is a connection timeout timer. /// After being connected, this is an idle timeout timer. - timer: JSC.BunTimer.EventLoopTimer = .{ + timer: bun.api.Timer.EventLoopTimer = .{ .tag = .PostgresSQLConnectionTimeout, .next = .{ .sec = 0, @@ -1220,7 +1220,7 @@ pub const PostgresSQLConnection = struct { /// It starts when the connection successfully starts (i.e. after handshake is complete). /// It stops when the connection is closed. 
max_lifetime_interval_ms: u32 = 0, - max_lifetime_timer: JSC.BunTimer.EventLoopTimer = .{ + max_lifetime_timer: bun.api.Timer.EventLoopTimer = .{ .tag = .PostgresSQLConnectionMaxLifetime, .next = .{ .sec = 0, @@ -1455,7 +1455,7 @@ pub const PostgresSQLConnection = struct { this.globalObject.bunVM().timer.insert(&this.max_lifetime_timer); } - pub fn onConnectionTimeout(this: *PostgresSQLConnection) JSC.BunTimer.EventLoopTimer.Arm { + pub fn onConnectionTimeout(this: *PostgresSQLConnection) bun.api.Timer.EventLoopTimer.Arm { debug("onConnectionTimeout", .{}); this.timer.state = .FIRED; @@ -1482,7 +1482,7 @@ pub const PostgresSQLConnection = struct { return .disarm; } - pub fn onMaxLifetimeTimeout(this: *PostgresSQLConnection) JSC.BunTimer.EventLoopTimer.Arm { + pub fn onMaxLifetimeTimeout(this: *PostgresSQLConnection) bun.api.Timer.EventLoopTimer.Arm { debug("onMaxLifetimeTimeout", .{}); this.max_lifetime_timer.state = .FIRED; if (this.status == .failed) return .disarm; @@ -1794,7 +1794,7 @@ pub const PostgresSQLConnection = struct { } comptime { - const jscall = JSC.toJSHostFunction(call); + const jscall = JSC.toJSHostFn(call); @export(&jscall, .{ .name = "PostgresSQLConnection__createInstance" }); } @@ -2403,7 +2403,7 @@ pub const PostgresSQLConnection = struct { }; var stack_buf: [70]DataCell = undefined; - var cells: []DataCell = stack_buf[0..@min(statement.fields.len, JSC.JSC__JSObject__maxInlineCapacity)]; + var cells: []DataCell = stack_buf[0..@min(statement.fields.len, JSC.JSObject.maxInlineCapacity())]; var free_cells = false; defer { for (cells[0..putter.count]) |*cell| { @@ -2412,7 +2412,7 @@ pub const PostgresSQLConnection = struct { if (free_cells) bun.default_allocator.free(cells); } - if (statement.fields.len >= JSC.JSC__JSObject__maxInlineCapacity) { + if (statement.fields.len >= JSC.JSObject.maxInlineCapacity()) { cells = try bun.default_allocator.alloc(DataCell, statement.fields.len); free_cells = true; } @@ -3010,7 +3010,7 @@ pub const PostgresSQLStatement = struct { nonDuplicatedCount -= 1; } } - const ids = if (nonDuplicatedCount <= JSC.JSC__JSObject__maxInlineCapacity) stack_ids[0..nonDuplicatedCount] else bun.default_allocator.alloc(JSC.JSObject.ExternColumnIdentifier, nonDuplicatedCount) catch bun.outOfMemory(); + const ids = if (nonDuplicatedCount <= JSC.JSObject.maxInlineCapacity()) stack_ids[0..nonDuplicatedCount] else bun.default_allocator.alloc(JSC.JSObject.ExternColumnIdentifier, nonDuplicatedCount) catch bun.outOfMemory(); var i: usize = 0; for (this.fields) |*field| { @@ -3034,7 +3034,7 @@ pub const PostgresSQLStatement = struct { i += 1; } - if (nonDuplicatedCount > JSC.JSC__JSObject__maxInlineCapacity) { + if (nonDuplicatedCount > JSC.JSObject.maxInlineCapacity()) { this.cached_structure.set(globalObject, null, ids); } else { this.cached_structure.set(globalObject, JSC.JSObject.createStructure( diff --git a/src/sql/postgres/postgres_protocol.zig b/src/sql/postgres/postgres_protocol.zig index dc9c713750..f35316d7e1 100644 --- a/src/sql/postgres/postgres_protocol.zig +++ b/src/sql/postgres/postgres_protocol.zig @@ -1,6 +1,6 @@ const std = @import("std"); const bun = @import("bun"); -const postgres = bun.JSC.Postgres; +const postgres = bun.api.Postgres; const Data = postgres.Data; const protocol = @This(); const PostgresInt32 = postgres.PostgresInt32; diff --git a/src/sql/postgres/postgres_types.zig b/src/sql/postgres/postgres_types.zig index 15258b8412..12089ff24a 100644 --- a/src/sql/postgres/postgres_types.zig +++ b/src/sql/postgres/postgres_types.zig @@ 
-1,6 +1,6 @@ const std = @import("std"); const bun = @import("bun"); -const postgres = bun.JSC.Postgres; +const postgres = bun.api.Postgres; const Data = postgres.Data; const protocol = @This(); const PostgresInt32 = postgres.PostgresInt32; diff --git a/src/string.zig b/src/string.zig index 655fc9f2a2..60dffe1198 100644 --- a/src/string.zig +++ b/src/string.zig @@ -191,7 +191,7 @@ pub const String = extern struct { } pub fn createUTF8(bytes: []const u8) String { - return JSC.WebCore.Encoder.toBunStringComptime(bytes, .utf8); + return JSC.WebCore.encoding.toBunStringComptime(bytes, .utf8); } pub fn createUTF16(bytes: []const u16) String { @@ -447,7 +447,7 @@ pub const String = extern struct { /// Max WTFStringImpl length. /// **Not** in bytes. In characters. pub inline fn max_length() usize { - return JSC.string_allocation_limit; + return JSC.VirtualMachine.string_allocation_limit; } /// If the allocation fails, this will free the bytes and return a dead string. @@ -673,14 +673,14 @@ pub const String = extern struct { pub fn encodeInto(self: String, out: []u8, comptime enc: JSC.Node.Encoding) !usize { if (self.isUTF16()) { - return JSC.WebCore.Encoder.encodeIntoFrom16(self.utf16(), out, enc, true); + return JSC.WebCore.encoding.encodeIntoFrom16(self.utf16(), out, enc, true); } if (self.isUTF8()) { @panic("TODO"); } - return JSC.WebCore.Encoder.encodeIntoFrom8(self.latin1(), out, enc); + return JSC.WebCore.encoding.encodeIntoFrom8(self.latin1(), out, enc); } pub fn encode(self: String, enc: JSC.Node.Encoding) []u8 { @@ -1157,7 +1157,7 @@ pub const SliceWithUnderlyingString = struct { } return .{ - .underlying = JSC.WebCore.Encoder.toBunStringFromOwnedSlice(owned_input_bytes, encoding), + .underlying = JSC.WebCore.encoding.toBunStringFromOwnedSlice(owned_input_bytes, encoding), }; } diff --git a/src/string/WTFStringImpl.zig b/src/string/WTFStringImpl.zig index 876e5e6ba1..58be51439d 100644 --- a/src/string/WTFStringImpl.zig +++ b/src/string/WTFStringImpl.zig @@ -205,7 +205,7 @@ pub const WTFStringImplStruct = extern struct { pub fn utf8ByteLength(this: WTFStringImpl) usize { if (this.is8Bit()) { const input = this.latin1Slice(); - return if (input.len > 0) JSC.WebCore.Encoder.byteLengthU8(input.ptr, input.len, .utf8) else 0; + return if (input.len > 0) JSC.WebCore.encoding.byteLengthU8(input.ptr, input.len, .utf8) else 0; } else { const input = this.utf16Slice(); return if (input.len > 0) bun.strings.elementLengthUTF16IntoUTF8([]const u16, input) else 0; diff --git a/src/string_immutable.zig b/src/string_immutable.zig index 3025727c86..6fece06564 100644 --- a/src/string_immutable.zig +++ b/src/string_immutable.zig @@ -11,6 +11,9 @@ const grapheme = @import("./grapheme.zig"); const JSC = bun.JSC; const OOM = bun.OOM; +/// memmem is provided by libc on posix, but implemented in zig for windows. 
+pub const memmem = bun.sys.workaround_symbols.memmem; + pub const Encoding = enum { ascii, utf8, @@ -25,11 +28,11 @@ pub const EncodingNonAscii = enum { latin1, }; -pub inline fn containsChar(self: string, char: u8) bool { +pub fn containsChar(self: string, char: u8) callconv(bun.callconv_inline) bool { return indexOfChar(self, char) != null; } -pub inline fn containsCharT(comptime T: type, self: []const T, char: u8) bool { +pub fn containsCharT(comptime T: type, self: []const T, char: u8) callconv(bun.callconv_inline) bool { return switch (T) { u8 => containsChar(self, char), u16 => std.mem.indexOfScalar(u16, self, char) != null, @@ -37,15 +40,15 @@ pub inline fn containsCharT(comptime T: type, self: []const T, char: u8) bool { }; } -pub inline fn contains(self: string, str: string) bool { +pub fn contains(self: string, str: string) callconv(bun.callconv_inline) bool { return containsT(u8, self, str); } -pub inline fn containsT(comptime T: type, self: []const T, str: []const T) bool { +pub fn containsT(comptime T: type, self: []const T, str: []const T) callconv(bun.callconv_inline) bool { return indexOfT(T, self, str) != null; } -pub inline fn containsCaseInsensitiveASCII(self: string, str: string) bool { +pub fn containsCaseInsensitiveASCII(self: string, str: string) callconv(bun.callconv_inline) bool { var start: usize = 0; while (start + str.len <= self.len) { if (eqlCaseInsensitiveASCIIIgnoreLength(self[start..][0..str.len], str)) { @@ -56,7 +59,7 @@ pub inline fn containsCaseInsensitiveASCII(self: string, str: string) bool { return false; } -pub inline fn removeLeadingDotSlash(slice: []const u8) []const u8 { +pub fn removeLeadingDotSlash(slice: []const u8) callconv(bun.callconv_inline) []const u8 { if (slice.len >= 2) { if ((@as(u16, @bitCast(slice[0..2].*)) == comptime std.mem.readInt(u16, "./", .little)) or (Environment.isWindows and @as(u16, @bitCast(slice[0..2].*)) == comptime std.mem.readInt(u16, ".\\", .little))) @@ -124,7 +127,7 @@ pub fn indexOfAnyT(comptime T: type, str: []const T, comptime chars: anytype) ?O return null; } -pub inline fn containsComptime(self: string, comptime str: string) bool { +pub fn containsComptime(self: string, comptime str: string) callconv(bun.callconv_inline) bool { if (comptime str.len == 0) @compileError("Don't call this with an empty string plz."); const start = std.mem.indexOfScalar(u8, self, str[0]) orelse return false; @@ -148,7 +151,7 @@ pub fn inMapCaseInsensitive(self: []const u8, comptime ComptimeStringMap: anytyp return bun.String.ascii(self).inMapCaseInsensitive(ComptimeStringMap); } -pub inline fn containsAny(in: anytype, target: anytype) bool { +pub fn containsAny(in: anytype, target: anytype) callconv(bun.callconv_inline) bool { for (in) |str| if (contains(if (@TypeOf(str) == u8) &[1]u8{str} else bun.span(str), target)) return true; return false; } @@ -447,23 +450,23 @@ pub fn indexOfSigned(self: string, str: string) i32 { return @as(i32, @intCast(i)); } -pub inline fn lastIndexOfChar(self: []const u8, char: u8) ?usize { +pub fn lastIndexOfChar(self: []const u8, char: u8) callconv(bun.callconv_inline) ?usize { if (comptime Environment.isLinux) { if (@inComptime()) { return lastIndexOfCharT(u8, self, char); } - const start = bun.C.memrchr(self.ptr, char, self.len) orelse return null; + const start = bun.c.memrchr(self.ptr, char, self.len) orelse return null; const i = @intFromPtr(start) - @intFromPtr(self.ptr); return @intCast(i); } return lastIndexOfCharT(u8, self, char); } -pub inline fn lastIndexOfCharT(comptime T: type, self: 
[]const T, char: T) ?usize { +pub fn lastIndexOfCharT(comptime T: type, self: []const T, char: T) callconv(bun.callconv_inline) ?usize { return std.mem.lastIndexOfScalar(T, self, char); } -pub inline fn lastIndexOf(self: string, str: string) ?usize { +pub fn lastIndexOf(self: string, str: string) callconv(bun.callconv_inline) ?usize { return std.mem.lastIndexOf(u8, self, str); } @@ -488,7 +491,7 @@ pub fn indexOf(self: string, str: string) ?usize { if (str_len == 1) return indexOfCharUsize(self, str_ptr[0]); - const start = bun.C.memmem(self_ptr, self_len, str_ptr, str_len) orelse return null; + const start = memmem(self_ptr, self_len, str_ptr, str_len) orelse return null; const i = @intFromPtr(start) - @intFromPtr(self_ptr); bun.unsafeAssert(i < self_len); @@ -609,7 +612,7 @@ pub const StringOrTinyString = struct { bun.unsafeAssert(@sizeOf(@This()) == 32); } - pub inline fn slice(this: *const StringOrTinyString) []const u8 { + pub fn slice(this: *const StringOrTinyString) callconv(bun.callconv_inline) []const u8 { // This is a switch expression instead of a statement to make sure it uses the faster assembly return switch (this.meta.is_tiny_string) { 1 => this.remainder_buf[0..this.meta.remainder_len], @@ -818,23 +821,23 @@ pub fn startsWithGeneric(comptime T: type, self: []const T, str: []const T) bool return eqlLong(bun.reinterpretSlice(u8, self[0..str.len]), bun.reinterpretSlice(u8, str[0..str.len]), false); } -pub inline fn endsWith(self: string, str: string) bool { +pub fn endsWith(self: string, str: string) callconv(bun.callconv_inline) bool { return str.len == 0 or @call(bun.callmod_inline, std.mem.endsWith, .{ u8, self, str }); } -pub inline fn endsWithComptime(self: string, comptime str: anytype) bool { +pub fn endsWithComptime(self: string, comptime str: anytype) callconv(bun.callconv_inline) bool { return self.len >= str.len and eqlComptimeIgnoreLen(self[self.len - str.len .. 
self.len], comptime str); } -pub inline fn startsWithChar(self: string, char: u8) bool { +pub fn startsWithChar(self: string, char: u8) callconv(bun.callconv_inline) bool { return self.len > 0 and self[0] == char; } -pub inline fn endsWithChar(self: string, char: u8) bool { +pub fn endsWithChar(self: string, char: u8) callconv(bun.callconv_inline) bool { return self.len > 0 and self[self.len - 1] == char; } -pub inline fn endsWithCharOrIsZeroLength(self: string, char: u8) bool { +pub fn endsWithCharOrIsZeroLength(self: string, char: u8) callconv(bun.callconv_inline) bool { return self.len == 0 or self[self.len - 1] == char; } @@ -1014,7 +1017,13 @@ pub fn hasSuffixComptime(self: string, comptime alt: anytype) bool { return self.len >= alt.len and eqlComptimeCheckLenWithType(u8, self[self.len - alt.len ..], alt, false); } -fn eqlComptimeCheckLenU8(a: []const u8, comptime b: []const u8, comptime check_len: bool) bool { +const eqlComptimeCheckLenU8 = if (bun.Environment.isDebug) eqlComptimeDebugRuntimeFallback else eqlComptimeCheckLenU8Impl; + +fn eqlComptimeDebugRuntimeFallback(a: []const u8, b: []const u8, check_len: bool) bool { + return std.mem.eql(u8, if (check_len) a else a.ptr[0..b.len], b); +} + +fn eqlComptimeCheckLenU8Impl(a: []const u8, comptime b: []const u8, comptime check_len: bool) bool { @setEvalBranchQuota(9999); if (comptime check_len) { @@ -1091,7 +1100,7 @@ pub fn eqlCaseInsensitiveASCII(a: string, b: string, comptime check_len: bool) b bun.unsafeAssert(b.len > 0); bun.unsafeAssert(a.len > 0); - return bun.C.strncasecmp(a.ptr, b.ptr, a.len) == 0; + return bun.c.strncasecmp(a.ptr, b.ptr, a.len) == 0; } pub fn eqlCaseInsensitiveT(comptime T: type, a: []const T, b: []const u8) bool { @@ -1192,7 +1201,7 @@ pub fn eqlLong(a_str: string, b_str: string, comptime check_len: bool) bool { return true; } -pub inline fn append(allocator: std.mem.Allocator, self: string, other: string) ![]u8 { +pub fn append(allocator: std.mem.Allocator, self: string, other: string) callconv(bun.callconv_inline) ![]u8 { var buf = try allocator.alloc(u8, self.len + other.len); if (self.len > 0) @memcpy(buf[0..self.len], self); @@ -1201,7 +1210,7 @@ pub inline fn append(allocator: std.mem.Allocator, self: string, other: string) return buf; } -pub inline fn concatAllocT(comptime T: type, allocator: std.mem.Allocator, strs: anytype) ![]T { +pub fn concatAllocT(comptime T: type, allocator: std.mem.Allocator, strs: anytype) callconv(bun.callconv_inline) ![]T { const buf = try allocator.alloc(T, len: { var len: usize = 0; inline for (strs) |s| { @@ -1215,7 +1224,7 @@ pub inline fn concatAllocT(comptime T: type, allocator: std.mem.Allocator, strs: }; } -pub inline fn concatBufT(comptime T: type, out: []T, strs: anytype) ![]T { +pub fn concatBufT(comptime T: type, out: []T, strs: anytype) callconv(bun.callconv_inline) ![]T { var remain = out; var n: usize = 0; inline for (strs) |s| { @@ -1257,7 +1266,7 @@ pub fn toUTF8AllocZ(allocator: std.mem.Allocator, js: []const u16) ![:0]u8 { return list.items[0 .. list.items.len - 1 :0]; } -pub inline fn appendUTF8MachineWordToUTF16MachineWord(output: *[@sizeOf(usize) / 2]u16, input: *const [@sizeOf(usize) / 2]u8) void { +pub fn appendUTF8MachineWordToUTF16MachineWord(output: *[@sizeOf(usize) / 2]u16, input: *const [@sizeOf(usize) / 2]u8) callconv(bun.callconv_inline) void { output[0 .. 
@sizeOf(usize) / 2].* = @as( [4]u16, @bitCast(@as( @@ -1267,7 +1276,7 @@ pub inline fn appendUTF8MachineWordToUTF16MachineWord(output: *[@sizeOf(usize) / ); } -pub inline fn copyU8IntoU16(output_: []u16, input_: []const u8) void { +pub fn copyU8IntoU16(output_: []u16, input_: []const u8) callconv(bun.callconv_inline) void { const output = output_; const input = input_; if (comptime Environment.allow_assert) assert(input.len <= output.len); @@ -1310,7 +1319,7 @@ pub fn copyU8IntoU16WithAlignment(comptime alignment: u21, output_: []align(alig } } -// pub inline fn copy(output_: []u8, input_: []const u8) void { +// pub fn copy(output_: []u8, input_: []const u8) callconv(bun.callconv_inline) void { // var output = output_; // var input = input_; // if (comptime Environment.allow_assert) assert(input.len <= output.len); @@ -1445,7 +1454,7 @@ pub const BOM = enum { pub fn removeAndConvertToUTF8AndFree(bom: BOM, allocator: std.mem.Allocator, bytes: []u8) ![]u8 { switch (bom) { .utf8 => { - bun.C.memmove(bytes.ptr, bytes.ptr + utf8_bytes.len, bytes.len - utf8_bytes.len); + _ = bun.c.memmove(bytes.ptr, bytes.ptr + utf8_bytes.len, bytes.len - utf8_bytes.len); return bytes[0 .. bytes.len - utf8_bytes.len]; }, .utf16_le => { @@ -1458,7 +1467,7 @@ pub const BOM = enum { else => { // TODO: this needs to re-encode, for now we just remove the BOM const bom_bytes = bom.getHeader(); - bun.C.memmove(bytes.ptr, bytes.ptr + bom_bytes.len, bytes.len - bom_bytes.len); + _ = bun.c.memmove(bytes.ptr, bytes.ptr + bom_bytes.len, bytes.len - bom_bytes.len); return bytes[0 .. bytes.len - bom_bytes.len]; }, } @@ -2795,12 +2804,12 @@ pub fn escapeHTMLForLatin1Input(allocator: std.mem.Allocator, latin1: []const u8 break :brk values; }; - inline fn appendString(buf: [*]u8, comptime str: []const u8) usize { + fn appendString(buf: [*]u8, comptime str: []const u8) callconv(bun.callconv_inline) usize { buf[0..str.len].* = str[0..str.len].*; return str.len; } - pub inline fn append(buf: [*]u8, char: u8) usize { + pub fn append(buf: [*]u8, char: u8) callconv(bun.callconv_inline) usize { if (lengths[char] == 1) { buf[0] = char; return 1; @@ -2816,7 +2825,7 @@ pub fn escapeHTMLForLatin1Input(allocator: std.mem.Allocator, latin1: []const u8 }; } - pub inline fn push(comptime len: anytype, chars_: *const [len]u8, allo: std.mem.Allocator) Escaped(u8) { + pub fn push(comptime len: anytype, chars_: *const [len]u8, allo: std.mem.Allocator) callconv(bun.callconv_inline) Escaped(u8) { const chars = chars_.*; var total: usize = 0; @@ -3902,28 +3911,28 @@ pub fn isAllASCII(slice: []const u8) bool { } // #define U16_LEAD(supplementary) (UChar)(((supplementary)>>10)+0xd7c0) -pub inline fn u16Lead(supplementary: anytype) u16 { +pub fn u16Lead(supplementary: anytype) callconv(bun.callconv_inline) u16 { return @intCast((supplementary >> 10) + 0xd7c0); } // #define U16_TRAIL(supplementary) (UChar)(((supplementary)&0x3ff)|0xdc00) -pub inline fn u16Trail(supplementary: anytype) u16 { +pub fn u16Trail(supplementary: anytype) callconv(bun.callconv_inline) u16 { return @intCast((supplementary & 0x3ff) | 0xdc00); } // #define U16_IS_TRAIL(c) (((c)&0xfffffc00)==0xdc00) -pub inline fn u16IsTrail(supplementary: u16) bool { +pub fn u16IsTrail(supplementary: u16) callconv(bun.callconv_inline) bool { return (@as(u32, @intCast(supplementary)) & 0xfffffc00) == 0xdc00; } // #define U16_IS_LEAD(c) (((c)&0xfffffc00)==0xd800) -pub inline fn u16IsLead(supplementary: u16) bool { +pub fn u16IsLead(supplementary: u16) callconv(bun.callconv_inline) bool { return 
(@as(u32, @intCast(supplementary)) & 0xfffffc00) == 0xd800; } // #define U16_GET_SUPPLEMENTARY(lead, trail) \ // (((UChar32)(lead)<<10UL)+(UChar32)(trail)-U16_SURROGATE_OFFSET) -pub inline fn u16GetSupplementary(lead: u32, trail: u32) u32 { +pub fn u16GetSupplementary(lead: u32, trail: u32) callconv(bun.callconv_inline) u32 { const shifted = lead << 10; return (shifted + trail) - u16_surrogate_offset; } @@ -4169,7 +4178,7 @@ pub fn decodeHexToBytesTruncate(destination: []u8, comptime Char: type, source: return _decodeHexToBytes(destination, Char, source, true) catch 0; } -inline fn _decodeHexToBytes(destination: []u8, comptime Char: type, source: []const Char, comptime truncate: bool) !usize { +fn _decodeHexToBytes(destination: []u8, comptime Char: type, source: []const Char, comptime truncate: bool) callconv(bun.callconv_inline) !usize { var remain = destination; var input = source; @@ -4661,7 +4670,7 @@ pub fn join(slices: []const string, delimiter: string, allocator: std.mem.Alloca pub fn order(a: []const u8, b: []const u8) std.math.Order { const len = @min(a.len, b.len); - const cmp = if (comptime Environment.isNative) bun.C.memcmp(a.ptr, b.ptr, len) else return std.mem.order(u8, a, b); + const cmp = if (comptime Environment.isNative) bun.c.memcmp(a.ptr, b.ptr, len) else return std.mem.order(u8, a, b); return switch (std.math.sign(cmp)) { 0 => std.math.order(a.len, b.len), 1 => .gt, @@ -4869,7 +4878,7 @@ pub fn NewCodePointIterator(comptime CodePointType_: type, comptime zeroValue: c return true; } - inline fn nextCodepointSlice(it: *Iterator) []const u8 { + fn nextCodepointSlice(it: *Iterator) callconv(bun.callconv_inline) []const u8 { const bytes = it.bytes; const prev = it.i; const next_ = prev + it.next_width; @@ -5294,15 +5303,15 @@ pub fn convertUTF16toUTF8InBuffer( return buf[0..result]; } -pub inline fn charIsAnySlash(char: u8) bool { +pub fn charIsAnySlash(char: u8) callconv(bun.callconv_inline) bool { return char == '/' or char == '\\'; } -pub inline fn startsWithWindowsDriveLetter(s: []const u8) bool { +pub fn startsWithWindowsDriveLetter(s: []const u8) callconv(bun.callconv_inline) bool { return startsWithWindowsDriveLetterT(u8, s); } -pub inline fn startsWithWindowsDriveLetterT(comptime T: type, s: []const T) bool { +pub fn startsWithWindowsDriveLetterT(comptime T: type, s: []const T) callconv(bun.callconv_inline) bool { return s.len > 2 and s[1] == ':' and switch (s[0]) { 'a'...'z', 'A'...'Z' => true, else => false, @@ -6162,7 +6171,7 @@ fn QuoteEscapeFormat(comptime flags: QuoteEscapeFormatFlags) type { } /// Generic. Works on []const u8, []const u16, etc -pub inline fn indexOfScalar(input: anytype, scalar: std.meta.Child(@TypeOf(input))) ?usize { +pub fn indexOfScalar(input: anytype, scalar: std.meta.Child(@TypeOf(input))) callconv(bun.callconv_inline) ?usize { if (comptime std.meta.Child(@TypeOf(input)) == u8) { return strings.indexOfCharUsize(input, scalar); } else { diff --git a/src/sys.zig b/src/sys.zig index 93ae508530..2ae5ba8702 100644 --- a/src/sys.zig +++ b/src/sys.zig @@ -1,9 +1,45 @@ -// This file is entirely based on Zig's std.posix -// The differences are in error handling +//! Cross-platform "system call" abstractions. On Linux, many of these functions +//! make direct system calls (std.os.linux). Others call `libc` APIs. +//! Windows uses a mix of `libuv`, `kernel32` and `ntdll`. macOS uses `libc`. +//! +//! Sometimes this namespace is referred to as "Syscall"; prefer "bun.sys"/"sys". +// +// TODO: Split and organize this file.
It is likely worth moving many functions +// into methods on `bun.FD`, and keeping this namespace to just overall stuff +// like `Error`, `Maybe`, `Tag`, and so on. +const sys = @This(); // to avoid ambiguous references. +const platform_defs = switch (Environment.os) { + .windows => @import("errno/windows_errno.zig"), + .linux => @import("errno/linux_errno.zig"), + .mac => @import("errno/darwin_errno.zig"), + .wasm => {}, +}; +pub const workaround_symbols = @import("workaround_missing_symbols.zig").current; +/// Enum of `errno` values +pub const E = platform_defs.E; +/// Namespace of (potentially polyfilled) libuv `errno` values. +/// Polyfilled on posix, mirrors the real libuv definitions on Windows. +pub const UV_E = platform_defs.UV_E; +pub const S = platform_defs.S; +/// TODO: The way we do errors in Bun needs to get cleaned up. This enum is way +/// too complicated; It's duplicated three times, and inside of it it has tons +/// of re-listings of all errno codes. Why is SystemErrno different than `E`? ...etc! +/// +/// The problem is because we use libc in some cases and we use zig's std lib in +/// other places and other times we go direct. So we end up with a lot of +/// redundant code. +pub const SystemErrno = platform_defs.SystemErrno; +pub const getErrno = platform_defs.getErrno; + +comptime { + _ = &workaround_symbols; // execute comptime logic to export any needed symbols +} + const std = @import("std"); const builtin = @import("builtin"); const bun = @import("bun"); +const c = bun.c; // translated c headers const posix = std.posix; const assertIsValidWindowsPath = bun.strings.assertIsValidWindowsPath; @@ -17,17 +53,15 @@ const libc = std.posix.system; const windows = bun.windows; -const C = bun.C; const Environment = bun.Environment; const JSC = bun.JSC; const MAX_PATH_BYTES = bun.MAX_PATH_BYTES; const PathString = bun.PathString; -const Syscall = @This(); const SystemError = JSC.SystemError; const linux = syscall; -pub const sys_uv = if (Environment.isWindows) @import("./sys_uv.zig") else Syscall; +pub const sys_uv = if (Environment.isWindows) @import("./sys_uv.zig") else sys; pub const F_OK = 0; pub const X_OK = 1; @@ -39,38 +73,19 @@ pub const syslog = log; pub const syscall = switch (Environment.os) { .linux => std.os.linux, - // We don't directly use the Darwin syscall interface. + // macOS requires using libc .mac => std.c, else => @compileError("not implemented"), }; -/// Non-cancellable versions of various libc functions are undocumented -const darwin_nocancel = struct { - const c = std.c; - pub extern "c" fn @"recvfrom$NOCANCEL"(sockfd: c.fd_t, noalias buf: *anyopaque, len: usize, flags: u32, noalias src_addr: ?*c.sockaddr, noalias addrlen: ?*c.socklen_t) isize; - pub extern "c" fn @"sendto$NOCANCEL"(sockfd: c.fd_t, buf: *const anyopaque, len: usize, flags: u32, dest_addr: ?*const c.sockaddr, addrlen: c.socklen_t) isize; - pub extern "c" fn @"fcntl$NOCANCEL"(fd: c.fd_t, cmd: c_int, ...) 
c_int; - // pub extern "c" fn @"sendmsg$NOCANCEL"(sockfd: c.fd_t, msg: *const std.x.os.Socket.Message, flags: c_int) isize; - // pub extern "c" fn @"recvmsg$NOCANCEL"(sockfd: c.fd_t, msg: *std.x.os.Socket.Message, flags: c_int) isize; - pub extern "c" fn @"connect$NOCANCEL"(sockfd: c.fd_t, sock_addr: *const c.sockaddr, addrlen: c.socklen_t) c_int; - pub extern "c" fn @"accept$NOCANCEL"(sockfd: c.fd_t, noalias addr: ?*c.sockaddr, noalias addrlen: ?*c.socklen_t) c_int; - pub extern "c" fn @"accept4$NOCANCEL"(sockfd: c.fd_t, noalias addr: ?*c.sockaddr, noalias addrlen: ?*c.socklen_t, flags: c_uint) c_int; - pub extern "c" fn @"open$NOCANCEL"(path: [*:0]const u8, oflag: c_uint, ...) c_int; - // https://opensource.apple.com/source/xnu/xnu-7195.81.3/libsyscall/wrappers/open-base.c - pub extern "c" fn @"openat$NOCANCEL"(fd: c.fd_t, path: [*:0]const u8, oflag: c_uint, ...) c_int; - pub extern "c" fn @"read$NOCANCEL"(fd: c.fd_t, buf: [*]u8, nbyte: usize) isize; - pub extern "c" fn @"pread$NOCANCEL"(fd: c.fd_t, buf: [*]u8, nbyte: usize, offset: c.off_t) isize; - pub extern "c" fn @"preadv$NOCANCEL"(fd: c.fd_t, uf: [*]std.posix.iovec, count: i32, offset: c.off_t) isize; - pub extern "c" fn @"readv$NOCANCEL"(fd: c.fd_t, uf: [*]std.posix.iovec, count: i32) isize; - pub extern "c" fn @"write$NOCANCEL"(fd: c.fd_t, buf: [*]const u8, nbyte: usize) isize; - pub extern "c" fn @"writev$NOCANCEL"(fd: c.fd_t, buf: [*]const std.posix.iovec_const, count: i32) isize; - pub extern "c" fn @"pwritev$NOCANCEL"(fd: c.fd_t, buf: [*]const std.posix.iovec_const, count: i32, offset: c.off_t) isize; -}; +const darwin_nocancel = bun.darwin.nocancel; fn toPackedO(number: anytype) std.posix.O { return @bitCast(number); } +pub const Mode = std.posix.mode_t; + pub const O = switch (Environment.os) { .mac => struct { pub const PATH = 0x0000; @@ -157,7 +172,7 @@ pub const O = switch (Environment.os) { pub const TMPFILE = 0o20040000; pub const NDELAY = NONBLOCK; - pub const SYMLINK = bun.c.O_SYMLINK; + pub const SYMLINK = c.O_SYMLINK; pub const toPacked = toPackedO; }, @@ -192,8 +207,6 @@ pub const O = switch (Environment.os) { }, }; -pub const S = if (Environment.isLinux) linux.S else if (Environment.isPosix) std.posix.S else struct {}; - pub const Tag = enum(u8) { TODO, @@ -309,8 +322,6 @@ pub const Tag = enum(u8) { }; pub const Error = struct { - const E = bun.C.E; - const retry_errno = if (Environment.isLinux) @as(Int, @intCast(@intFromEnum(E.AGAIN))) else if (Environment.isMac) @@ -329,7 +340,7 @@ pub const Error = struct { fd: bun.FileDescriptor = bun.invalid_fd, from_libuv: if (Environment.isWindows) bool else void = if (Environment.isWindows) false else undefined, path: []const u8 = "", - syscall: Syscall.Tag = Syscall.Tag.TODO, + syscall: sys.Tag = sys.Tag.TODO, dest: []const u8 = "", pub fn clone(this: *const Error, allocator: std.mem.Allocator) !Error { @@ -339,14 +350,14 @@ pub const Error = struct { return copy; } - pub fn fromCode(errno: E, syscall_tag: Syscall.Tag) Error { + pub fn fromCode(errno: E, syscall_tag: sys.Tag) Error { return .{ .errno = @as(Int, @intCast(@intFromEnum(errno))), .syscall = syscall_tag, }; } - pub fn fromCodeInt(errno: anytype, syscall_tag: Syscall.Tag) Error { + pub fn fromCodeInt(errno: anytype, syscall_tag: sys.Tag) Error { return .{ .errno = @as(Int, @intCast(if (Environment.isWindows) @abs(errno) else errno)), .syscall = syscall_tag, @@ -402,7 +413,7 @@ pub const Error = struct { }; } - pub inline fn withPathAndSyscall(this: Error, path: anytype, syscall_: Syscall.Tag) Error { + pub 
inline fn withPathAndSyscall(this: Error, path: anytype, syscall_: sys.Tag) Error { if (std.meta.Child(@TypeOf(path)) == u16) { @compileError("Do not pass WString path to withPath, it needs the path encoded as utf8"); } @@ -449,17 +460,17 @@ pub const Error = struct { // setRuntimeSafety(false) because we use tagName function, which will be null on invalid enum value. @setRuntimeSafety(false); if (this.from_libuv) { - break :brk @as(C.SystemErrno, @enumFromInt(@intFromEnum(bun.windows.libuv.translateUVErrorToE(this.errno)))); + break :brk @as(SystemErrno, @enumFromInt(@intFromEnum(bun.windows.libuv.translateUVErrorToE(this.errno)))); } - break :brk @as(C.SystemErrno, @enumFromInt(this.errno)); + break :brk @as(SystemErrno, @enumFromInt(this.errno)); }; - if (bun.tagName(bun.C.SystemErrno, system_errno)) |errname| { + if (bun.tagName(SystemErrno, system_errno)) |errname| { return errname; } - } else if (this.errno > 0 and this.errno < C.SystemErrno.max) { - const system_errno = @as(C.SystemErrno, @enumFromInt(this.errno)); - if (bun.tagName(bun.C.SystemErrno, system_errno)) |errname| { + } else if (this.errno > 0 and this.errno < SystemErrno.max) { + const system_errno = @as(SystemErrno, @enumFromInt(this.errno)); + if (bun.tagName(SystemErrno, system_errno)) |errname| { return errname; } } @@ -473,14 +484,14 @@ pub const Error = struct { /// 1. Convert libuv errno values into libc ones. /// 2. Get the tag name as a string for printing. - pub fn getErrorCodeTagName(err: *const Error) ?struct { [:0]const u8, C.SystemErrno } { + pub fn getErrorCodeTagName(err: *const Error) ?struct { [:0]const u8, SystemErrno } { if (!Environment.isWindows) { - if (err.errno > 0 and err.errno < C.SystemErrno.max) { - const system_errno = @as(C.SystemErrno, @enumFromInt(err.errno)); + if (err.errno > 0 and err.errno < SystemErrno.max) { + const system_errno = @as(SystemErrno, @enumFromInt(err.errno)); return .{ @tagName(system_errno), system_errno }; } } else { - const system_errno: C.SystemErrno = brk: { + const system_errno: SystemErrno = brk: { // setRuntimeSafety(false) because we use tagName function, which will be null on invalid enum value. 
@setRuntimeSafety(false); if (err.from_libuv) { @@ -489,7 +500,7 @@ pub const Error = struct { break :brk @enumFromInt(err.errno); }; - if (bun.tagName(bun.C.SystemErrno, system_errno)) |errname| { + if (bun.tagName(SystemErrno, system_errno)) |errname| { return .{ errname, system_errno }; } } @@ -540,11 +551,11 @@ pub const Error = struct { }; // errno label - var code: ?[:0]const u8 = null; + var maybe_code: ?[:0]const u8 = null; var label: ?[]const u8 = null; if (this.getErrorCodeTagName()) |resolved_errno| { - code, const system_errno = resolved_errno; - err.code = bun.String.static(code.?); + maybe_code, const system_errno = resolved_errno; + err.code = bun.String.static(maybe_code.?); label = libuv_error_map.get(system_errno); } @@ -555,8 +566,8 @@ pub const Error = struct { var stream = std.io.fixedBufferStream(&message_buf); const writer = stream.writer(); brk: { - if (code) |c| { - writer.writeAll(c) catch break :brk; + if (maybe_code) |code| { + writer.writeAll(code) catch break :brk; writer.writeAll(": ") catch break :brk; } writer.writeAll(label orelse "Unknown Error") catch break :brk; @@ -598,7 +609,7 @@ pub const Error = struct { pub inline fn todo() Error { if (Environment.isDebug) { - @panic("bun.sys.Error.todo() was called"); + @panic("Error.todo() was called"); } return Error{ .errno = todo_errno, .syscall = .TODO }; } @@ -613,7 +624,7 @@ pub const Error = struct { }; pub fn Maybe(comptime ReturnTypeT: type) type { - return JSC.Node.Maybe(ReturnTypeT, Error); + return bun.api.node.Maybe(ReturnTypeT, Error); } pub fn getcwd(buf: *bun.PathBuffer) Maybe([]const u8) { @@ -643,7 +654,7 @@ pub fn getcwdZ(buf: *bun.PathBuffer) Maybe([:0]const u8) { Result.errnoSysP(@as(c_int, 0), .getcwd, buf).?; } -const syscall_or_C = if (Environment.isLinux) syscall else bun.C; +const syscall_or_c = if (Environment.isLinux) syscall else bun.c; pub fn fchown(fd: bun.FileDescriptor, uid: JSC.Node.uid_t, gid: JSC.Node.gid_t) Maybe(void) { if (comptime Environment.isWindows) { @@ -651,7 +662,7 @@ pub fn fchown(fd: bun.FileDescriptor, uid: JSC.Node.uid_t, gid: JSC.Node.gid_t) } while (true) { - const rc = syscall_or_C.fchown(fd.cast(), uid, gid); + const rc = syscall_or_c.fchown(fd.cast(), uid, gid); if (Maybe(void).errnoSysFd(rc, .fchown, fd)) |err| { if (err.getErrno() == .INTR) continue; return err; @@ -669,7 +680,7 @@ pub fn fchmod(fd: bun.FileDescriptor, mode: bun.Mode) Maybe(void) { } while (true) { - const rc = syscall_or_C.fchmod(fd.cast(), mode); + const rc = syscall_or_c.fchmod(fd.cast(), mode); if (Maybe(void).errnoSysFd(rc, .fchmod, fd)) |err| { if (err.getErrno() == .INTR) continue; return err; @@ -685,7 +696,7 @@ pub fn fchmodat(fd: bun.FileDescriptor, path: [:0]const u8, mode: bun.Mode, flag if (comptime Environment.isWindows) @compileError("Use fchmod instead"); while (true) { - const rc = syscall_or_C.fchmodat(fd.cast(), path.ptr, mode, flags); + const rc = syscall_or_c.fchmodat(fd.cast(), path.ptr, mode, flags); if (Maybe(void).errnoSysFd(rc, .fchmodat, fd)) |err| { if (err.getErrno() == .INTR) continue; return err; @@ -703,7 +714,7 @@ pub fn chmod(path: [:0]const u8, mode: bun.Mode) Maybe(void) { } while (true) { - const rc = syscall_or_C.chmod(path.ptr, mode); + const rc = syscall_or_c.chmod(path.ptr, mode); if (Maybe(void).errnoSysP(rc, .chmod, path)) |err| { if (err.getErrno() == .INTR) continue; return err; @@ -724,7 +735,7 @@ pub fn chdirOSPath(path: bun.stringZ, destination: if (Environment.isPosix) bun. 
if (comptime Environment.isWindows) { const wbuf = bun.WPathBufferPool.get(); defer bun.WPathBufferPool.put(wbuf); - if (bun.c.SetCurrentDirectoryW(bun.strings.toWDirPath(wbuf, destination)) == windows.FALSE) { + if (c.SetCurrentDirectoryW(bun.strings.toWDirPath(wbuf, destination)) == windows.FALSE) { log("SetCurrentDirectory({s}) = {d}", .{ destination, kernel32.GetLastError() }); return Maybe(void).errnoSysPD(0, .chdir, path, destination) orelse Maybe(void).success; } @@ -744,11 +755,11 @@ pub fn chdir(path: anytype, destination: anytype) Maybe(void) { if (comptime Type == []u8 or Type == []const u8) { return chdirOSPath( &(std.posix.toPosixPath(path) catch return .{ .err = .{ - .errno = @intFromEnum(bun.C.SystemErrno.EINVAL), + .errno = @intFromEnum(SystemErrno.EINVAL), .syscall = .chdir, } }), &(std.posix.toPosixPath(destination) catch return .{ .err = .{ - .errno = @intFromEnum(bun.C.SystemErrno.EINVAL), + .errno = @intFromEnum(SystemErrno.EINVAL), .syscall = .chdir, } }), ); @@ -803,7 +814,7 @@ pub fn stat(path: [:0]const u8) Maybe(bun.Stat) { // aarch64 linux doesn't implement a "stat" syscall. It's all fstatat. linux.fstatat(std.posix.AT.FDCWD, path, &stat_, 0) else - syscall_or_C.stat(path, &stat_); + workaround_symbols.stat(path, &stat_); if (comptime Environment.allow_assert) log("stat({s}) = {d}", .{ bun.asByteSlice(path), rc }); @@ -819,9 +830,9 @@ pub fn statfs(path: [:0]const u8) Maybe(bun.StatFS) { } else { var statfs_ = mem.zeroes(bun.StatFS); const rc = if (Environment.isLinux) - bun.c.statfs(path, &statfs_) + c.statfs(path, &statfs_) else if (Environment.isMac) - bun.c.statfs(path, &statfs_) + c.statfs(path, &statfs_) else @compileError("Unsupported platform"); @@ -838,7 +849,7 @@ pub fn lstat(path: [:0]const u8) Maybe(bun.Stat) { return sys_uv.lstat(path); } else { var stat_buf = mem.zeroes(bun.Stat); - if (Maybe(bun.Stat).errnoSysP(C.lstat(path, &stat_buf), .lstat, path)) |err| return err; + if (Maybe(bun.Stat).errnoSysP(workaround_symbols.lstat(path, &stat_buf), .lstat, path)) |err| return err; return Maybe(bun.Stat){ .result = stat_buf }; } } @@ -853,7 +864,7 @@ pub fn fstat(fd: bun.FileDescriptor) Maybe(bun.Stat) { var stat_ = mem.zeroes(bun.Stat); - const rc = syscall_or_C.fstat(fd.cast(), &stat_); + const rc = workaround_symbols.fstat(fd.cast(), &stat_); if (comptime Environment.allow_assert) log("fstat({}) = {d}", .{ fd, rc }); @@ -949,7 +960,7 @@ pub fn mkdirA(file_path: []const u8, flags: mode_t) Maybe(void) { if (comptime Environment.isMac) { return Maybe(void).errnoSysP(syscall.mkdir(&(std.posix.toPosixPath(file_path) catch return Maybe(void){ .err = .{ - .errno = @intFromEnum(bun.C.E.NOMEM), + .errno = @intFromEnum(E.NOMEM), .syscall = .open, }, }), flags), .mkdir, file_path) orelse Maybe(void).success; @@ -958,7 +969,7 @@ pub fn mkdirA(file_path: []const u8, flags: mode_t) Maybe(void) { if (comptime Environment.isLinux) { return Maybe(void).errnoSysP(linux.mkdir(&(std.posix.toPosixPath(file_path) catch return Maybe(void){ .err = .{ - .errno = @intFromEnum(bun.C.E.NOMEM), + .errno = @intFromEnum(E.NOMEM), .syscall = .open, }, }), flags), .mkdir, file_path) orelse Maybe(void).success; @@ -981,7 +992,7 @@ pub fn mkdirOSPath(file_path: bun.OSPathSliceZ, flags: mode_t) Maybe(void) { return switch (Environment.os) { else => mkdir(file_path, flags), .windows => { - const rc = bun.c.CreateDirectoryW(file_path, null); + const rc = c.CreateDirectoryW(file_path, null); if (Maybe(void).errnoSys( rc, .mkdir, @@ -1000,7 +1011,7 @@ const fnctl_int = if 
(Environment.isLinux) usize else c_int; pub fn fcntl(fd: bun.FileDescriptor, cmd: i32, arg: anytype) Maybe(fnctl_int) { while (true) { const result = switch (@TypeOf(arg)) { - i32, comptime_int, c_int => fcntl_symbol(fd.cast(), cmd, @as(c_int, arg)), + i32, comptime_int, c_int => fcntl_symbol(fd.native(), cmd, @as(c_int, arg)), i64 => fcntl_symbol(fd.cast(), cmd, @as(c_long, @bitCast(arg))), *const anyopaque, *anyopaque, usize => fcntl_symbol(fd.cast(), cmd, arg), else => @compileError("Unsupported argument type for fcntl"), @@ -1015,22 +1026,6 @@ pub fn fcntl(fd: bun.FileDescriptor, cmd: i32, arg: anytype) Maybe(fnctl_int) { unreachable; } -pub fn getErrno(rc: anytype) bun.C.E { - if (comptime Environment.isWindows) { - if (comptime @TypeOf(rc) == bun.windows.NTSTATUS) { - return bun.windows.translateNTStatusToErrno(rc); - } - - if (bun.windows.Win32Error.get().toSystemErrno()) |e| { - return e.toE(); - } - - return bun.C.E.UNKNOWN; - } - - return bun.C.getErrno(rc); -} - const w = std.os.windows; /// Normalizes for ntdll.dll APIs. Replaces long-path prefixes with nt object @@ -1087,7 +1082,7 @@ pub fn normalizePathWindows( if (buf.len < path.len) { return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.NOMEM), + .errno = @intFromEnum(E.NOMEM), .syscall = .open, }, }; @@ -1108,7 +1103,7 @@ pub fn normalizePathWindows( const base_path = bun.windows.GetFinalPathNameByHandle(base_fd, w.GetFinalPathNameByHandleFormat{}, buf) catch { return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.BADFD), + .errno = @intFromEnum(E.BADFD), .syscall = .open, } }; }; @@ -1223,7 +1218,7 @@ fn openDirAtWindowsNtPath( return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.UNKNOWN), + .errno = @intFromEnum(E.UNKNOWN), .syscall = .open, }, }; @@ -1251,7 +1246,7 @@ fn openWindowsDevicePath( .errno = if (windows.Win32Error.get().toSystemErrno()) |e| @intFromEnum(e) else - @intFromEnum(bun.C.E.UNKNOWN), + @intFromEnum(E.UNKNOWN), .syscall = .open, } }; } @@ -1342,7 +1337,7 @@ pub fn openFileAtWindowsNtPath( const path_len_bytes = std.math.cast(u16, path.len * 2) orelse return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.NOMEM), + .errno = @intFromEnum(E.NOMEM), .syscall = .open, }, }; @@ -1436,7 +1431,7 @@ pub fn openFileAtWindowsNtPath( if (kernel32.SetFilePointerEx(result, 0, null, FILE_END) == 0) { return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.UNKNOWN), + .errno = @intFromEnum(E.UNKNOWN), .syscall = .SetFilePointerEx, }, }; @@ -1456,7 +1451,7 @@ pub fn openFileAtWindowsNtPath( return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.UNKNOWN), + .errno = @intFromEnum(E.UNKNOWN), .syscall = .open, }, }; @@ -1730,7 +1725,7 @@ pub fn openatOSPath(dirfd: bun.FileDescriptor, file_path: bun.OSPathSliceZ, flag if (comptime Environment.allow_assert) log("openat({}, {s}, {d}) = {d}", .{ dirfd, bun.sliceTo(file_path, 0), flags, rc }); - return switch (Syscall.getErrno(rc)) { + return switch (sys.getErrno(rc)) { .SUCCESS => .{ .result = .fromNative(@intCast(rc)) }, .INTR => continue, else => |err| { @@ -1761,7 +1756,7 @@ pub fn access(path: bun.OSPathSliceZ, mode: i32) Maybe(void) { return .{ .result = {} }; } else { return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.PERM), + .errno = @intFromEnum(E.PERM), .syscall = .access, } }; } @@ -1781,7 +1776,7 @@ pub fn openat(dirfd: bun.FileDescriptor, file_path: [:0]const u8, flags: i32, pe pub fn openatFileWithLibuvFlags(dirfd: bun.FileDescriptor, file_path: [:0]const u8, flags: bun.JSC.Node.FileSystemFlags, perm: bun.Mode) Maybe(bun.FileDescriptor) { if (comptime 
Environment.isWindows) { const f = flags.toWindows() catch return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.INVAL), + .errno = @intFromEnum(E.INVAL), .syscall = .open, .path = file_path, } }; @@ -1799,7 +1794,7 @@ pub fn openatA(dirfd: bun.FileDescriptor, file_path: []const u8, flags: i32, per const pathZ = std.posix.toPosixPath(file_path) catch return Maybe(bun.FileDescriptor){ .err = .{ - .errno = @intFromEnum(bun.C.E.NAMETOOLONG), + .errno = @intFromEnum(E.NAMETOOLONG), .syscall = .open, }, }; @@ -1889,14 +1884,14 @@ pub fn write(fd: bun.FileDescriptor, bytes: []const u8) Maybe(usize) { if (er == .ACCESS_DENIED) { // file is not writable return .{ .err = .{ - .errno = @intFromEnum(bun.C.SystemErrno.EBADF), + .errno = @intFromEnum(SystemErrno.EBADF), .syscall = .write, .fd = fd, } }; } - const errno = (bun.C.SystemErrno.init(bun.windows.kernel32.GetLastError()) orelse bun.C.SystemErrno.EUNKNOWN).toE(); + const errno = (SystemErrno.init(bun.windows.kernel32.GetLastError()) orelse SystemErrno.EUNKNOWN).toE(); return .{ - .err = Syscall.Error{ + .err = sys.Error{ .errno = @intFromEnum(errno), .syscall = .write, .fd = fd, @@ -2165,7 +2160,7 @@ pub fn read(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { const rc = kernel32.ReadFile(fd.native(), buf.ptr, @as(u32, @intCast(adjusted_len)), &amount_read, null); if (rc == windows.FALSE) { const ret: Maybe(usize) = .{ - .err = Syscall.Error{ + .err = sys.Error{ .errno = @intFromEnum(bun.windows.getLastErrno()), .syscall = .read, .fd = fd, @@ -2202,7 +2197,7 @@ pub fn readAll(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { return .{ .result = total_read }; } -const socket_flags_nonblock = bun.c.MSG_DONTWAIT | bun.c.MSG_NOSIGNAL; +const socket_flags_nonblock = c.MSG_DONTWAIT | c.MSG_NOSIGNAL; pub fn recvNonBlock(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { return recv(fd, buf, socket_flags_nonblock); @@ -2363,12 +2358,12 @@ pub const RenameAt2Flags = packed struct { var flags: u32 = 0; if (comptime Environment.isMac) { - if (self.exchange) flags |= bun.C.RENAME_SWAP; - if (self.exclude) flags |= bun.C.RENAME_EXCL; - if (self.nofollow) flags |= bun.C.RENAME_NOFOLLOW_ANY; + if (self.exchange) flags |= c.RENAME_SWAP; + if (self.exclude) flags |= c.RENAME_EXCL; + if (self.nofollow) flags |= c.RENAME_NOFOLLOW_ANY; } else { - if (self.exchange) flags |= bun.C.RENAME_EXCHANGE; - if (self.exclude) flags |= bun.C.RENAME_NOREPLACE; + if (self.exchange) flags |= c.RENAME_EXCHANGE; + if (self.exclude) flags |= c.RENAME_NOREPLACE; } return flags; @@ -2385,9 +2380,9 @@ pub fn renameatConcurrently( switch (renameatConcurrentlyWithoutFallback(from_dir_fd, from, to_dir_fd, to)) { .result => return Maybe(void).success, .err => |e| { - if (opts.move_fallback and e.getErrno() == bun.C.E.XDEV) { + if (opts.move_fallback and e.getErrno() == E.XDEV) { bun.Output.debugWarn("renameatConcurrently() failed with E.XDEV, falling back to moveFileZSlowMaybe()", .{}); - return bun.C.moveFileZSlowMaybe(from_dir_fd, from, to_dir_fd, to); + return moveFileZSlowMaybe(from_dir_fd, from, to_dir_fd, to); } return .{ .err = e }; }, @@ -2406,7 +2401,7 @@ pub fn renameatConcurrentlyWithoutFallback( { // Happy path: the folder doesn't exist in the cache dir, so we can // just rename it. We don't need to delete anything. 
- var err = switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{ + var err = switch (renameat2(from_dir_fd, from, to_dir_fd, to, .{ .exclude = true, })) { // if ENOENT don't retry @@ -2423,7 +2418,7 @@ pub fn renameatConcurrentlyWithoutFallback( else => false, }) { did_atomically_replace = true; - switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{ + switch (renameat2(from_dir_fd, from, to_dir_fd, to, .{ .exchange = true, })) { .err => {}, @@ -2441,7 +2436,7 @@ pub fn renameatConcurrentlyWithoutFallback( } else { std.fs.deleteTreeAbsolute(to) catch {}; } - switch (bun.sys.renameat(from_dir_fd, from, to_dir_fd, to)) { + switch (renameat(from_dir_fd, from, to_dir_fd, to)) { .err => |err| { return .{ .err = err }; }, @@ -2460,7 +2455,7 @@ pub fn renameat2(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.F while (true) { const rc = switch (comptime Environment.os) { .linux => std.os.linux.renameat2(@intCast(from_dir.cast()), from.ptr, @intCast(to_dir.cast()), to.ptr, flags.int()), - .mac => bun.C.renameatx_np(@intCast(from_dir.cast()), from.ptr, @intCast(to_dir.cast()), to.ptr, flags.int()), + .mac => bun.c.renameatx_np(@intCast(from_dir.cast()), from.ptr, @intCast(to_dir.cast()), to.ptr, flags.int()), else => @compileError("renameat2() is not implemented on this platform"), }; @@ -2485,7 +2480,7 @@ pub fn renameat(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.Fi bun.WPathBufferPool.put(w_buf_to); } - const rc = bun.C.renameAtW( + const rc = bun.windows.renameAtW( from_dir, bun.strings.toNTPath(w_buf_from, from), to_dir, @@ -2510,7 +2505,7 @@ pub fn renameat(from_dir: bun.FileDescriptor, from: [:0]const u8, to_dir: bun.Fi pub fn chown(path: [:0]const u8, uid: posix.uid_t, gid: posix.gid_t) Maybe(void) { while (true) { - if (Maybe(void).errnoSysP(C.chown(path, uid, gid), .chown, path)) |err| { + if (Maybe(void).errnoSysP(c.chown(path, uid, gid), .chown, path)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -2633,7 +2628,7 @@ pub fn clonefile(from: [:0]const u8, to: [:0]const u8) Maybe(void) { if (comptime !Environment.isMac) @compileError("macOS only"); while (true) { - if (Maybe(void).errnoSys(C.darwin.clonefile(from, to, 0), .clonefile)) |err| { + if (Maybe(void).errnoSys(c.clonefile(from, to, 0), .clonefile)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -2645,7 +2640,7 @@ pub fn copyfile(from: [:0]const u8, to: [:0]const u8, flags: posix.system.COPYFI if (comptime !Environment.isMac) @compileError("macOS only"); while (true) { - if (Maybe(void).errnoSys(C.darwin.copyfile(from, to, null, flags), .copyfile)) |err| { + if (Maybe(void).errnoSys(c.copyfile(from, to, null, flags), .copyfile)) |err| { if (err.getErrno() == .INTR) continue; return err; } @@ -2745,7 +2740,7 @@ pub fn getFdPath(fd: bun.FileDescriptor, out_buffer: *bun.PathBuffer) Maybe([]u8 .windows => { var wide_buf: [windows.PATH_MAX_WIDE]u16 = undefined; const wide_slice = bun.windows.GetFinalPathNameByHandle(fd.cast(), .{}, wide_buf[0..]) catch { - return Maybe([]u8){ .err = .{ .errno = @intFromEnum(bun.C.SystemErrno.EBADF), .syscall = .GetFinalPathNameByHandle } }; + return Maybe([]u8){ .err = .{ .errno = @intFromEnum(SystemErrno.EBADF), .syscall = .GetFinalPathNameByHandle } }; }; // Trust that Windows gives us valid UTF-16LE. 
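// An illustrative, caller-side sketch of the RenameAt2Flags mapping above: `.exclude`
// becomes RENAME_NOREPLACE on Linux and RENAME_EXCL on macOS (via renameatx_np), and
// `.exchange` becomes RENAME_EXCHANGE / RENAME_SWAP. The file names and the `.EXIST`
// errno check are assumptions for this sketch, not code from this change; renameat2()
// is only implemented for Linux and macOS, and `bun` is assumed to be imported.
fn atomicReplaceSketch(dir: bun.FileDescriptor) bun.sys.Maybe(void) {
    // Try a no-clobber rename first so an existing destination is never overwritten.
    switch (bun.sys.renameat2(dir, "staged.tmp", dir, "current", .{ .exclude = true })) {
        .result => return .{ .result = {} },
        .err => |err| {
            // The destination already exists: atomically swap the two entries instead
            // of deleting the old one and renaming in two non-atomic steps.
            if (err.getErrno() == .EXIST) {
                return bun.sys.renameat2(dir, "staged.tmp", dir, "current", .{ .exchange = true });
            }
            return .{ .err = err };
        },
    }
}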
@@ -2791,7 +2786,7 @@ pub fn mmap( const fail = std.c.MAP_FAILED; if (rc == fail) { return .initErr(.{ - .errno = @as(Syscall.Error.Int, @truncate(@intFromEnum(bun.C.getErrno(@as(i64, @bitCast(@intFromPtr(fail))))))), + .errno = @as(sys.Error.Int, @truncate(@intFromEnum(getErrno(@as(i64, @bitCast(@intFromPtr(fail))))))), .syscall = .mmap, }); } @@ -2905,7 +2900,7 @@ pub fn socketpair(domain: socketpair_t, socktype: socketpair_t, protocol: socket break; } - const err: ?Syscall.Error = err: { + const err: ?sys.Error = err: { // Set O_CLOEXEC first. inline for (0..2) |i| { @@ -3010,7 +3005,7 @@ pub fn getMaxPipeSizeOnLinux() usize { fn once() c_int { const strings = bun.strings; const default_out_size = 512 * 1024; - const pipe_max_size_fd = switch (bun.sys.open("/proc/sys/fs/pipe-max-size", bun.O.RDONLY, 0)) { + const pipe_max_size_fd = switch (open("/proc/sys/fs/pipe-max-size", bun.O.RDONLY, 0)) { .result => |fd2| fd2, .err => |err| { log("Failed to open /proc/sys/fs/pipe-max-size: {d}\n", .{err.errno}); @@ -3019,7 +3014,7 @@ pub fn getMaxPipeSizeOnLinux() usize { }; defer pipe_max_size_fd.close(); var max_pipe_size_buf: [128]u8 = undefined; - const max_pipe_size = switch (bun.sys.read(pipe_max_size_fd, max_pipe_size_buf[0..])) { + const max_pipe_size = switch (read(pipe_max_size_fd, max_pipe_size_buf[0..])) { .result => |bytes_read| std.fmt.parseInt(i64, strings.trim(max_pipe_size_buf[0..bytes_read], "\n"), 10) catch |err| { log("Failed to parse /proc/sys/fs/pipe-max-size: {any}\n", .{@errorName(err)}); return default_out_size; @@ -3188,7 +3183,7 @@ pub fn faccessat(dir_fd: bun.FileDescriptor, subpath: anytype) JSC.Maybe(bool) { if (comptime Environment.isLinux) { // avoid loading the libc symbol for this to reduce chances of GLIBC minimum version requirements const rc = linux.faccessat(dir_fd.cast(), subpath, linux.F_OK, 0); - syslog("faccessat({}, {}, O_RDONLY, 0) = {d}", .{ dir_fd, bun.fmt.fmtOSPath(subpath, .{}), if (rc == 0) 0 else @intFromEnum(bun.C.getErrno(rc)) }); + syslog("faccessat({}, {}, O_RDONLY, 0) = {d}", .{ dir_fd, bun.fmt.fmtOSPath(subpath, .{}), if (rc == 0) 0 else @intFromEnum(getErrno(rc)) }); if (rc == 0) { return JSC.Maybe(bool){ .result = true }; } @@ -3198,7 +3193,7 @@ pub fn faccessat(dir_fd: bun.FileDescriptor, subpath: anytype) JSC.Maybe(bool) { // on other platforms use faccessat from libc const rc = std.c.faccessat(dir_fd.cast(), subpath, std.posix.F_OK, 0); - syslog("faccessat({}, {}, O_RDONLY, 0) = {d}", .{ dir_fd, bun.fmt.fmtOSPath(subpath, .{}), if (rc == 0) 0 else @intFromEnum(bun.C.getErrno(rc)) }); + syslog("faccessat({}, {}, O_RDONLY, 0) = {d}", .{ dir_fd, bun.fmt.fmtOSPath(subpath, .{}), if (rc == 0) 0 else @intFromEnum(getErrno(rc)) }); if (rc == 0) { return JSC.Maybe(bool){ .result = true }; } @@ -3232,7 +3227,7 @@ pub fn futimens(fd: bun.FileDescriptor, atime: JSC.Node.TimeLike, mtime: JSC.Nod return Maybe(void).success; } - switch (bun.C.getErrno(rc)) { + switch (getErrno(rc)) { .INTR => continue, else => return Maybe(void).errnoSysFd(rc, .futimens, fd).?, } @@ -3263,7 +3258,7 @@ fn utimensWithFlags(path: bun.OSPathSliceZ, atime: JSC.Node.TimeLike, mtime: JSC return Maybe(void).success; } - switch (bun.C.getErrno(rc)) { + switch (getErrno(rc)) { .INTR => continue, else => return Maybe(void).errnoSysP(rc, .utimensat, path).?, } @@ -3273,7 +3268,7 @@ fn utimensWithFlags(path: bun.OSPathSliceZ, atime: JSC.Node.TimeLike, mtime: JSC } pub fn getFcntlFlags(fd: bun.FileDescriptor) Maybe(fnctl_int) { - return switch (bun.sys.fcntl( + return switch (fcntl( 
fd, std.posix.F.GETFL, 0, @@ -3300,7 +3295,7 @@ pub fn updateNonblocking(fd: bun.FileDescriptor, nonblocking: bool) Maybe(void) const new_flags: i32 = if (nonblocking) current_flags | @as(i32, bun.O.NONBLOCK) else current_flags & ~@as(i32, bun.O.NONBLOCK); if (new_flags != current_flags) { - switch (bun.sys.fcntl(fd, std.posix.F.SETFL, @as(fnctl_int, @intCast(new_flags)))) { + switch (fcntl(fd, std.posix.F.SETFL, @as(fnctl_int, @intCast(new_flags)))) { .err => |err| return .{ .err = err }, .result => {}, } @@ -3348,15 +3343,15 @@ pub fn existsAtType(fd: bun.FileDescriptor, subpath: anytype) Maybe(ExistsAtType return .{ .err = err.err }; } - const is_regular_file = basic_info.FileAttributes != bun.c.INVALID_FILE_ATTRIBUTES and + const is_regular_file = basic_info.FileAttributes != c.INVALID_FILE_ATTRIBUTES and // from libuv: directories cannot be read-only // https://github.com/libuv/libuv/blob/eb5af8e3c0ea19a6b0196d5db3212dae1785739b/src/win/fs.c#L2144-L2146 - (basic_info.FileAttributes & bun.c.FILE_ATTRIBUTE_DIRECTORY == 0 or - basic_info.FileAttributes & bun.c.FILE_ATTRIBUTE_READONLY == 0); + (basic_info.FileAttributes & c.FILE_ATTRIBUTE_DIRECTORY == 0 or + basic_info.FileAttributes & c.FILE_ATTRIBUTE_READONLY == 0); - const is_dir = basic_info.FileAttributes != bun.c.INVALID_FILE_ATTRIBUTES and - basic_info.FileAttributes & bun.c.FILE_ATTRIBUTE_DIRECTORY != 0 and - basic_info.FileAttributes & bun.c.FILE_ATTRIBUTE_READONLY == 0; + const is_dir = basic_info.FileAttributes != c.INVALID_FILE_ATTRIBUTES and + basic_info.FileAttributes & c.FILE_ATTRIBUTE_DIRECTORY != 0 and + basic_info.FileAttributes & c.FILE_ATTRIBUTE_READONLY == 0; return if (is_dir) { syslog("NtQueryAttributesFile({}, O_RDONLY, 0) = directory", .{bun.fmt.fmtOSPath(path, .{})}); @@ -3366,7 +3361,7 @@ pub fn existsAtType(fd: bun.FileDescriptor, subpath: anytype) Maybe(ExistsAtType return .{ .result = .file }; } else { syslog("NtQueryAttributesFile({}, O_RDONLY, 0) = {d}", .{ bun.fmt.fmtOSPath(path, .{}), basic_info.FileAttributes }); - return .{ .err = bun.sys.Error.fromCode(.UNKNOWN, .access) }; + return .{ .err = Error.fromCode(.UNKNOWN, .access) }; }; } @@ -3559,7 +3554,7 @@ pub fn dupWithFlags(fd: bun.FileDescriptor, _: i32) Maybe(bun.FileDescriptor) { } const ArgType = if (comptime Environment.isLinux) usize else c_int; - const out = switch (fcntl(fd, @as(i32, bun.C.F.DUPFD_CLOEXEC), @as(ArgType, 0))) { + const out = switch (fcntl(fd, @as(i32, bun.c.F_DUPFD_CLOEXEC), @as(ArgType, 0))) { .result => |result| result, .err => |err| return .{ .err = err }, }; @@ -3577,14 +3572,14 @@ pub fn linkat(dir_fd: bun.FileDescriptor, basename: []const u8, dest_dir_fd: bun @intCast(dir_fd), &(std.posix.toPosixPath(basename) catch return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.NOMEM), + .errno = @intFromEnum(E.NOMEM), .syscall = .open, }, }), @intCast(dest_dir_fd), &(std.posix.toPosixPath(dest_name) catch return .{ .err = .{ - .errno = @intFromEnum(bun.C.E.NOMEM), + .errno = @intFromEnum(E.NOMEM), .syscall = .open, }, }), @@ -3656,12 +3651,21 @@ pub fn linkatTmpfile(tmpfd: bun.FileDescriptor, dirfd: bun.FileDescriptor, name: } } +/// c-bindings.cpp +extern "c" fn sys_preadv2( + fd: c_int, + iov: [*]const std.posix.iovec, + iovcnt: c_int, + offset: std.posix.off_t, + flags: c_uint, +) isize; + /// On Linux, this `preadv2(2)` to attempt to read a blocking file descriptor without blocking. /// /// On other platforms, this is just a wrapper around `read(2)`. 
pub fn readNonblocking(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { if (Environment.isLinux) { - while (bun.C.linux.RWFFlagSupport.isMaybeSupported()) { + while (bun.linux.RWFFlagSupport.isMaybeSupported()) { const iovec = [1]std.posix.iovec{.{ .base = buf.ptr, .len = buf.len, @@ -3669,7 +3673,7 @@ pub fn readNonblocking(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { var debug_timer = bun.Output.DebugTimer.start(); // Note that there is a bug on Linux Kernel 5 - const rc = C.sys_preadv2(fd.native(), &iovec, 1, -1, std.os.linux.RWF.NOWAIT); + const rc = sys_preadv2(fd.native(), &iovec, 1, -1, std.os.linux.RWF.NOWAIT); if (comptime Environment.isDebug) { log("preadv2({}, {d}) = {d} ({})", .{ fd, buf.len, rc, debug_timer }); @@ -3682,7 +3686,7 @@ pub fn readNonblocking(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { if (Maybe(usize).errnoSysFd(rc, .read, fd)) |err| { switch (err.getErrno()) { .OPNOTSUPP, .NOSYS => { - bun.C.linux.RWFFlagSupport.disable(); + bun.linux.RWFFlagSupport.disable(); switch (bun.isReadable(fd)) { .hup, .ready => return read(fd, buf), else => return .{ .err = Error.retry }, @@ -3700,12 +3704,21 @@ pub fn readNonblocking(fd: bun.FileDescriptor, buf: []u8) Maybe(usize) { return read(fd, buf); } +/// c-bindings.cpp +pub extern "c" fn sys_pwritev2( + fd: c_int, + iov: [*]const std.posix.iovec_const, + iovcnt: c_int, + offset: std.posix.off_t, + flags: c_uint, +) isize; + /// On Linux, this uses `pwritev2(2)` to attempt to write to a blocking file descriptor without blocking. /// /// On other platforms, this is just a wrapper around `write(2)`. pub fn writeNonblocking(fd: bun.FileDescriptor, buf: []const u8) Maybe(usize) { if (Environment.isLinux) { - while (bun.C.linux.RWFFlagSupport.isMaybeSupported()) { + while (bun.linux.RWFFlagSupport.isMaybeSupported()) { const iovec = [1]std.posix.iovec_const{.{ .base = buf.ptr, .len = buf.len, @@ -3713,7 +3726,7 @@ pub fn writeNonblocking(fd: bun.FileDescriptor, buf: []const u8) Maybe(usize) { var debug_timer = bun.Output.DebugTimer.start(); - const rc = C.sys_pwritev2(fd.native(), &iovec, 1, -1, std.os.linux.RWF.NOWAIT); + const rc = sys_pwritev2(fd.native(), &iovec, 1, -1, std.os.linux.RWF.NOWAIT); if (comptime Environment.isDebug) { log("pwritev2({}, {d}) = {d} ({})", .{ fd, buf.len, rc, debug_timer }); @@ -3726,7 +3739,7 @@ pub fn writeNonblocking(fd: bun.FileDescriptor, buf: []const u8) Maybe(usize) { if (Maybe(usize).errnoSysFd(rc, .write, fd)) |err| { switch (err.getErrno()) { .OPNOTSUPP, .NOSYS => { - bun.C.linux.RWFFlagSupport.disable(); + bun.linux.RWFFlagSupport.disable(); switch (bun.isWritable(fd)) { .hup, .ready => return write(fd, buf), else => return .{ .err = Error.retry }, @@ -3905,7 +3918,7 @@ pub const File = struct { // On Windows, close the file before moving it.
if (Environment.isWindows) this.close(); const cwd = bun.FD.cwd(); - try bun.C.moveFileZWithHandle(this.handle, cwd, src, cwd, dest); + try bun.sys.moveFileZWithHandle(this.handle, cwd, src, cwd, dest); } fn stdIoRead(this: File, buf: []u8) ReadError!usize { @@ -3971,7 +3984,7 @@ pub const File = struct { switch (windows.GetLastError()) { .SUCCESS => {}, else => |err| { - return .{ .err = Error.fromCode((bun.C.SystemErrno.init(err) orelse bun.C.SystemErrno.EUNKNOWN).toE(), .fstat) }; + return .{ .err = Error.fromCode((SystemErrno.init(err) orelse SystemErrno.EUNKNOWN).toE(), .fstat) }; }, } } @@ -4023,9 +4036,9 @@ pub const File = struct { var read_amount: usize = 0; while (read_amount < buf.len) { switch (if (comptime Environment.isPosix) - bun.sys.pread(this.handle, buf[read_amount..], @intCast(read_amount)) + pread(this.handle, buf[read_amount..], @intCast(read_amount)) else - bun.sys.read(this.handle, buf[read_amount..])) { + sys.read(this.handle, buf[read_amount..])) { .err => |err| { return .{ .err = err }; }, @@ -4063,9 +4076,9 @@ pub const File = struct { } switch (if (comptime Environment.isPosix) - bun.sys.pread(this.handle, list.unusedCapacitySlice(), total) + pread(this.handle, list.unusedCapacitySlice(), total) else - bun.sys.read(this.handle, list.unusedCapacitySlice())) { + sys.read(this.handle, list.unusedCapacitySlice())) { .err => |err| { return .{ .err = err }; }, @@ -4134,10 +4147,10 @@ pub const File = struct { } if (comptime ElementType == u8 and std.meta.sentinel(@TypeOf(path)) == null) { - break :brk Syscall.openatA(from(dir_fd).handle, path, O.RDONLY, 0); + break :brk sys.openatA(from(dir_fd).handle, path, O.RDONLY, 0); } - break :brk Syscall.openat(from(dir_fd).handle, path, O.RDONLY, 0); + break :brk sys.openat(from(dir_fd).handle, path, O.RDONLY, 0); }; const this = switch (rc) { @@ -4281,7 +4294,6 @@ pub const libuv_error_map = brk: { .{ "ENODATA", "no data available" }, .{ "EUNATCH", "protocol driver not attached" }, }; - const SystemErrno = bun.C.SystemErrno; var map = std.EnumMap(SystemErrno, [:0]const u8).initFull("unknown error"); for (entries) |entry| { const key, const text = entry; @@ -4547,7 +4559,6 @@ pub const coreutils_error_map = brk: { }, }; - const SystemErrno = bun.C.SystemErrno; var map = std.EnumMap(SystemErrno, [:0]const u8).initFull("unknown error"); for (entries) |entry| { const key, const text = entry; @@ -4570,3 +4581,416 @@ pub fn selfProcessMemoryUsage() ?usize { } return rss; } + +export fn Bun__errnoName(err: c_int) ?[*:0]const u8 { + return @tagName(SystemErrno.init(err) orelse return null); +} + +// TODO: this is wrong on Windows +const libc_stat = bun.Stat; +const Stat = std.fs.File.Stat; + +pub fn lstat_absolute(path: [:0]const u8) !Stat { + if (builtin.os.tag == .windows) { + @compileError("Not implemented yet, consider using lstat()"); + } + + var st = std.mem.zeroes(libc_stat); + switch (std.posix.errno(workaround_symbols.lstat(path.ptr, &st))) { + .SUCCESS => {}, + .NOENT => return error.FileNotFound, + // .EINVAL => unreachable, + .BADF => unreachable, // Always a race condition. 
+ .NOMEM => return error.SystemResources, + .ACCES => return error.AccessDenied, + else => |err| return posix.unexpectedErrno(err), + } + + const atime = st.atime(); + const mtime = st.mtime(); + const ctime = st.ctime(); + const Kind = std.fs.File.Kind; + return Stat{ + .inode = st.ino, + .size = @as(u64, @bitCast(st.size)), + .mode = st.mode, + .kind = switch (builtin.os.tag) { + .wasi => switch (st.filetype) { + posix.FILETYPE_BLOCK_DEVICE => Kind.block_device, + posix.FILETYPE_CHARACTER_DEVICE => Kind.character_device, + posix.FILETYPE_DIRECTORY => Kind.directory, + posix.FILETYPE_SYMBOLIC_LINK => Kind.sym_link, + posix.FILETYPE_REGULAR_FILE => Kind.file, + posix.FILETYPE_SOCKET_STREAM, posix.FILETYPE_SOCKET_DGRAM => Kind.unix_domain_socket, + else => Kind.unknown, + }, + else => switch (st.mode & posix.S.IFMT) { + posix.S.IFBLK => Kind.block_device, + posix.S.IFCHR => Kind.character_device, + posix.S.IFDIR => Kind.directory, + posix.S.IFIFO => Kind.named_pipe, + posix.S.IFLNK => Kind.sym_link, + posix.S.IFREG => Kind.file, + posix.S.IFSOCK => Kind.unix_domain_socket, + else => Kind.unknown, + }, + }, + .atime = @as(i128, atime.sec) * std.time.ns_per_s + atime.nsec, + .mtime = @as(i128, mtime.sec) * std.time.ns_per_s + mtime.nsec, + .ctime = @as(i128, ctime.sec) * std.time.ns_per_s + ctime.nsec, + }; +} + +// renameatZ fails when renaming across mount points +// we assume that this is relatively uncommon +pub fn moveFileZ(from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) !void { + switch (renameatConcurrentlyWithoutFallback(from_dir, filename, to_dir, destination)) { + .err => |err| { + // allow over-writing an empty directory + if (err.getErrno() == .ISDIR) { + _ = rmdirat(to_dir, destination.ptr); + try renameat(from_dir, filename, to_dir, destination).unwrap(); + return; + } + + if (err.getErrno() == .XDEV) { + try moveFileZSlow(from_dir, filename, to_dir, destination); + } else { + return bun.errnoToZigErr(err.errno); + } + }, + .result => {}, + } +} + +pub fn moveFileZWithHandle(from_handle: bun.FileDescriptor, from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) !void { + switch (renameat(from_dir, filename, to_dir, destination)) { + .err => |err| { + // allow over-writing an empty directory + if (err.getErrno() == .ISDIR) { + _ = rmdirat(to_dir, destination.ptr); + + try (renameat(from_dir, filename, to_dir, destination).unwrap()); + return; + } + + if (err.getErrno() == .XDEV) { + try copyFileZSlowWithHandle(from_handle, to_dir, destination).unwrap(); + _ = unlinkat(from_dir, filename); + } + + return bun.errnoToZigErr(err.errno); + }, + .result => {}, + } +} + +// On Linux, this will be fast because sendfile() supports copying between two file descriptors on disk +// macOS & BSDs will be slow because +pub fn moveFileZSlow(from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) !void { + return try moveFileZSlowMaybe(from_dir, filename, to_dir, destination).unwrap(); +} + +pub fn moveFileZSlowMaybe(from_dir: bun.FileDescriptor, filename: [:0]const u8, to_dir: bun.FileDescriptor, destination: [:0]const u8) Maybe(void) { + const in_handle = switch (openat(from_dir, filename, bun.O.RDONLY | bun.O.CLOEXEC, if (Environment.isWindows) 0 else 0o644)) { + .result => |f| f, + .err => |e| return .{ .err = e }, + }; + defer in_handle.close(); + _ = from_dir.unlinkat(filename); + return copyFileZSlowWithHandle(in_handle, 
to_dir, destination); +} + +pub fn copyFileZSlowWithHandle(in_handle: bun.FileDescriptor, to_dir: bun.FileDescriptor, destination: [:0]const u8) Maybe(void) { + if (comptime Environment.isWindows) { + var buf0: bun.WPathBuffer = undefined; + var buf1: bun.WPathBuffer = undefined; + + const dest = switch (normalizePathWindows(u8, to_dir, destination, &buf0, .{})) { + .result => |x| x, + .err => |e| return .{ .err = e }, + }; + const src_len = bun.windows.GetFinalPathNameByHandleW(in_handle.cast(), &buf1, buf1.len, 0); + if (src_len == 0) { + return Maybe(void).errno(bun.sys.E.BUSY, .GetFinalPathNameByHandle); + } else if (src_len >= buf1.len) { + return Maybe(void).errno(bun.sys.E.NAMETOOLONG, .GetFinalPathNameByHandle); + } + const src = buf1[0..src_len :0]; + return bun.copyFile(src, dest); + } else { + const stat_ = switch (fstat(in_handle)) { + .result => |s| s, + .err => |e| return .{ .err = e }, + }; + + // Attempt to delete incase it already existed. + // This fixes ETXTBUSY on Linux + _ = unlinkat(to_dir, destination); + + const out_handle = switch (openat( + to_dir, + destination, + bun.O.WRONLY | bun.O.CREAT | bun.O.CLOEXEC | bun.O.TRUNC, + if (comptime Environment.isPosix) 0o644 else 0, + )) { + .result => |fd| fd, + .err => |e| return .{ .err = e }, + }; + defer out_handle.close(); + + if (comptime Environment.isLinux) { + _ = std.os.linux.fallocate(out_handle.cast(), 0, 0, @intCast(stat_.size)); + } + + switch (bun.copyFile(in_handle, out_handle)) { + .err => |e| return .{ .err = e }, + .result => {}, + } + + if (comptime Environment.isPosix) { + _ = bun.c.fchmod(out_handle.cast(), stat_.mode); + _ = bun.c.fchown(out_handle.cast(), stat_.uid, stat_.gid); + } + + return Maybe(void).success; + } +} + +pub fn kindFromMode(mode: mode_t) std.fs.File.Kind { + return switch (mode & bun.S.IFMT) { + bun.S.IFBLK => std.fs.File.Kind.block_device, + bun.S.IFCHR => std.fs.File.Kind.character_device, + bun.S.IFDIR => std.fs.File.Kind.directory, + bun.S.IFIFO => std.fs.File.Kind.named_pipe, + bun.S.IFLNK => std.fs.File.Kind.sym_link, + bun.S.IFREG => std.fs.File.Kind.file, + bun.S.IFSOCK => std.fs.File.Kind.unix_domain_socket, + else => .unknown, + }; +} + +pub fn getSelfExeSharedLibPaths(allocator: std.mem.Allocator) error{OutOfMemory}![][:0]u8 { + const List = std.ArrayList([:0]u8); + switch (builtin.os.tag) { + .linux, + .freebsd, + .netbsd, + .dragonfly, + .openbsd, + .solaris, + => { + var paths = List.init(allocator); + errdefer { + const slice = paths.toOwnedSlice() catch &.{}; + for (slice) |item| { + allocator.free(item); + } + allocator.free(slice); + } + try posix.dl_iterate_phdr(&paths, error{OutOfMemory}, struct { + fn callback(info: *posix.dl_phdr_info, size: usize, list: *List) !void { + _ = size; + const name = info.dlpi_name orelse return; + if (name[0] == '/') { + const item = try list.allocator.dupeZ(u8, mem.sliceTo(name, 0)); + errdefer list.allocator.free(item); + try list.append(item); + } + } + }.callback); + return try paths.toOwnedSlice(); + }, + .macos, .ios, .watchos, .tvos => { + var paths = List.init(allocator); + errdefer { + const slice = paths.toOwnedSlice() catch &.{}; + for (slice) |item| { + allocator.free(item); + } + allocator.free(slice); + } + const img_count = std.c._dyld_image_count(); + for (0..img_count) |i| { + const name = std.c._dyld_get_image_name(i); + const item = try allocator.dupeZ(u8, mem.sliceTo(name, 0)); + errdefer allocator.free(item); + try paths.append(item); + } + return try paths.toOwnedSlice(); + }, + // revisit if Haiku implements 
dl_iterat_phdr (https://dev.haiku-os.org/ticket/15743) + .haiku => { + var paths = List.init(allocator); + errdefer { + const slice = paths.toOwnedSlice() catch &.{}; + for (slice) |item| { + allocator.free(item); + } + allocator.free(slice); + } + + const b = "/boot/system/runtime_loader"; + const item = try allocator.dupeZ(u8, mem.sliceTo(b, 0)); + errdefer allocator.free(item); + try paths.append(item); + + return try paths.toOwnedSlice(); + }, + else => @compileError("getSelfExeSharedLibPaths unimplemented for this target"), + } +} + +pub const preallocate_length = switch (bun.Environment.os) { + .linux => 2048 * 1024, + else => {}, +}; +pub const preallocate_supported = @TypeOf(preallocate_length) != void; + +// https://gist.github.com/Jarred-Sumner/b37b93399b63cbfd86e908c59a0a37df +// ext4 NVME Linux kernel 5.17.0-1016-oem x86_64 +// +// hyperfine "./micro 1024 temp" "./micro 1024 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" +// Benchmark 1: ./micro 1024 temp +// Time (mean ± σ): 1.8 ms ± 0.2 ms [User: 0.6 ms, System: 0.1 ms] +// Range (min … max): 1.2 ms … 2.3 ms 67 runs +// Benchmark 2: ./micro 1024 temp --preallocate +// Time (mean ± σ): 1.8 ms ± 0.1 ms [User: 0.6 ms, System: 0.1 ms] +// Range (min … max): 1.4 ms … 2.2 ms 121 runs +// Summary +// './micro 1024 temp --preallocate' ran +// 1.01 ± 0.13 times faster than './micro 1024 temp' +// +// hyperfine "./micro 65432 temp" "./micro 65432 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" +// Benchmark 1: ./micro 65432 temp +// Time (mean ± σ): 1.8 ms ± 0.2 ms [User: 0.7 ms, System: 0.1 ms] +// Range (min … max): 1.2 ms … 2.3 ms 94 runs +// Benchmark 2: ./micro 65432 temp --preallocate +// Time (mean ± σ): 2.0 ms ± 0.1 ms [User: 0.6 ms, System: 0.1 ms] +// Range (min … max): 1.7 ms … 2.3 ms 108 runs +// Summary +// './micro 65432 temp' ran +// 1.08 ± 0.12 times faster than './micro 65432 temp --preallocate' +// +// hyperfine "./micro 654320 temp" "./micro 654320 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" +// Benchmark 1: ./micro 654320 temp +// Time (mean ± σ): 2.3 ms ± 0.2 ms [User: 0.9 ms, System: 0.3 ms] +// Range (min … max): 1.9 ms … 2.9 ms 96 runs +// +// Benchmark 2: ./micro 654320 temp --preallocate +// Time (mean ± σ): 2.2 ms ± 0.1 ms [User: 0.9 ms, System: 0.2 ms] +// Range (min … max): 1.9 ms … 2.7 ms 115 runs +// +// Warning: Command took less than 5 ms to complete. Results might be inaccurate. 
+// +// Summary +// './micro 654320 temp --preallocate' ran +// 1.04 ± 0.10 times faster than './micro 654320 temp' +// +// hyperfine "./micro 6543200 temp" "./micro 6543200 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" +// Benchmark 1: ./micro 6543200 temp +// Time (mean ± σ): 6.3 ms ± 0.4 ms [User: 0.4 ms, System: 4.9 ms] +// Range (min … max): 5.8 ms … 8.6 ms 84 runs +// +// Benchmark 2: ./micro 6543200 temp --preallocate +// Time (mean ± σ): 5.5 ms ± 0.3 ms [User: 0.5 ms, System: 3.9 ms] +// Range (min … max): 5.1 ms … 7.1 ms 93 runs +// +// Summary +// './micro 6543200 temp --preallocate' ran +// 1.14 ± 0.09 times faster than './micro 6543200 temp' +// +// hyperfine "./micro 65432000 temp" "./micro 65432000 temp --preallocate" --prepare="rm -rf temp && free && sync && echo 3 > /proc/sys/vm/drop_caches && free" +// Benchmark 1: ./micro 65432000 temp +// Time (mean ± σ): 52.9 ms ± 0.4 ms [User: 3.1 ms, System: 48.7 ms] +// Range (min … max): 52.4 ms … 54.4 ms 36 runs +// +// Benchmark 2: ./micro 65432000 temp --preallocate +// Time (mean ± σ): 44.6 ms ± 0.8 ms [User: 2.3 ms, System: 41.2 ms] +// Range (min … max): 44.0 ms … 47.3 ms 37 runs +// +// Summary +// './micro 65432000 temp --preallocate' ran +// 1.19 ± 0.02 times faster than './micro 65432000 temp' +// +// hyperfine "./micro 65432000 temp" "./micro 65432000 temp --preallocate" --prepare="rm -rf temp" +// Benchmark 1: ./micro 65432000 temp +// Time (mean ± σ): 51.7 ms ± 0.9 ms [User: 2.1 ms, System: 49.6 ms] +// Range (min … max): 50.7 ms … 54.1 ms 49 runs +// +// Benchmark 2: ./micro 65432000 temp --preallocate +// Time (mean ± σ): 43.8 ms ± 2.3 ms [User: 2.2 ms, System: 41.4 ms] +// Range (min … max): 42.7 ms … 54.7 ms 56 runs +// +// Summary +// './micro 65432000 temp --preallocate' ran +// 1.18 ± 0.06 times faster than './micro 65432000 temp' +// +pub fn preallocate_file(fd: std.posix.fd_t, offset: std.posix.off_t, len: std.posix.off_t) anyerror!void { + switch (Environment.os) { + .linux => { + _ = std.os.linux.fallocate(fd, 0, @as(i64, @intCast(offset)), len); + }, + .mac => { + // benchmarking this did nothing on macOS + // i verified it wasn't returning -1 + + // Based on https://api.kde.org/frameworks/kcoreaddons/html/posix__fallocate__mac_8h_source.html + // var fstore = zeroes(fstore_t); + // fstore.fst_flags = F_ALLOCATECONTIG; + // fstore.fst_posmode = F_PEOFPOSMODE; + // fstore.fst_offset = 0; + // fstore.fst_length = len + offset; + // var rc = os.system.fcntl(fd, os.F.PREALLOCATE, &fstore); + }, + else => {}, // not tested, + } +} + +pub fn dlopen(filename: [:0]const u8, flags: std.c.RTLD) ?*anyopaque { + if (comptime Environment.isWindows) { + return bun.windows.LoadLibraryA(filename); + } + + return std.c.dlopen(filename, flags); +} + +pub fn dlsymImpl(handle: ?*anyopaque, name: [:0]const u8) ?*anyopaque { + if (comptime Environment.isWindows) { + return bun.windows.GetProcAddressA(handle, name); + } else if (comptime Environment.isMac or Environment.isLinux) { + return std.c.dlsym(handle, name.ptr); + } + + @compileError("dlsym unimplemented for this target"); +} + +pub fn dlsymWithHandle(comptime Type: type, comptime name: [:0]const u8, comptime handle_getter: fn () ?*anyopaque) ?Type { + if (comptime @typeInfo(Type) != .pointer) { + @compileError("dlsym must be a pointer type (e.g. ?const *fn()). 
Received " ++ @typeName(Type) ++ "."); + } + + const Wrapper = struct { + pub var function: Type = undefined; + var failed = false; + pub var once = std.once(loadOnce); + fn loadOnce() void { + function = bun.cast(Type, dlsymImpl(@call(bun.callmod_inline, handle_getter, .{}), name) orelse { + failed = true; + return; + }); + } + }; + Wrapper.once.call(); + if (Wrapper.failed) { + return null; + } + return Wrapper.function; +} + +pub const umask = switch (Environment.os) { + else => bun.c.umask, + // Using the same typedef and define for `mode_t` and `umask` as node on windows. + // https://github.com/nodejs/node/blob/ad5e2dab4c8306183685973387829c2f69e793da/src/node_process_methods.cc#L29 + .windows => @extern(*const fn (mode: u16) callconv(.c) u16, .{ .name = "_umask" }), +}; diff --git a/src/sys_uv.zig b/src/sys_uv.zig index f96e60cb1e..40a5a091cf 100644 --- a/src/sys_uv.zig +++ b/src/sys_uv.zig @@ -11,8 +11,6 @@ const kernel32 = bun.windows; const linux = posix.linux; const uv = bun.windows.libuv; -const C = bun.C; -const E = C.E; const Environment = bun.Environment; const FileDescriptor = bun.FileDescriptor; const JSC = bun.JSC; @@ -181,7 +179,7 @@ pub fn readlink(file_path: [:0]const u8, buf: []u8) Maybe([:0]u8) { const slice = bun.span(req.ptrAs([*:0]u8)); if (slice.len > buf.len) { log("uv readlink({s}) = {d}, {s} TRUNCATED", .{ file_path, rc.int(), slice }); - return .{ .err = .{ .errno = @intFromEnum(E.NOMEM), .syscall = .readlink, .path = file_path } }; + return .{ .err = .{ .errno = @intFromEnum(bun.sys.E.NOMEM), .syscall = .readlink, .path = file_path } }; } log("uv readlink({s}) = {d}, {s}", .{ file_path, rc.int(), slice }); @memcpy(buf[0..slice.len], slice); diff --git a/src/tmp.zig b/src/tmp.zig index 08eee9bf60..72eb3edddf 100644 --- a/src/tmp.zig +++ b/src/tmp.zig @@ -84,6 +84,6 @@ pub const Tmpfile = struct { } } - try bun.C.moveFileZWithHandle(this.fd, this.destination_dir, this.tmpfilename, this.destination_dir, destname); + try bun.sys.moveFileZWithHandle(this.fd, this.destination_dir, this.tmpfilename, this.destination_dir, destname); } }; diff --git a/src/toml/toml_parser.zig b/src/toml/toml_parser.zig index 035d6fe19e..fbbdf466ed 100644 --- a/src/toml/toml_parser.zig +++ b/src/toml/toml_parser.zig @@ -16,7 +16,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const expect = std.testing.expect; const ImportKind = importRecord.ImportKind; const BindingNodeIndex = js_ast.BindingNodeIndex; diff --git a/src/tracy.zig b/src/tracy.zig index 341ded983b..e656f8ac46 100644 --- a/src/tracy.zig +++ b/src/tracy.zig @@ -534,14 +534,14 @@ fn dlsym(comptime Type: type, comptime symbol: [:0]const u8) ?Type { const RLTD: std.c.RTLD = if (bun.Environment.isMac) @bitCast(@as(i32, -2)) else if (bun.Environment.isLinux) .{} else {}; if (bun.getenvZ("BUN_TRACY_PATH")) |path| { - const handle = bun.C.dlopen(&(std.posix.toPosixPath(path) catch unreachable), RLTD); + const handle = bun.sys.dlopen(&(std.posix.toPosixPath(path) catch unreachable), RLTD); if (handle != null) { Handle.handle = handle; break :get; } } inline for (comptime paths_to_try) |path| { - const handle = bun.C.dlopen(path, RLTD); + const handle = bun.sys.dlopen(path, RLTD); if (handle != null) { Handle.handle = handle; break; diff --git a/src/transpiler.zig b/src/transpiler.zig index 7a8f282d18..b7787ee27d 100644 --- a/src/transpiler.zig +++ b/src/transpiler.zig @@ -9,7 +9,7 @@ const stringZ = 
bun.stringZ; const default_allocator = bun.default_allocator; const StoredFileDescriptorType = bun.StoredFileDescriptorType; const FeatureFlags = bun.FeatureFlags; -const C = bun.C; + const std = @import("std"); const lex = bun.js_lexer; const logger = bun.logger; @@ -1207,7 +1207,7 @@ pub const Transpiler = struct { var path_buf2: bun.PathBuffer = undefined; @memcpy(path_buf2[0..path.text.len], path.text); path_buf2[path.text.len..][0..bun.bytecode_extension.len].* = bun.bytecode_extension.*; - const bytecode = bun.sys.File.toSourceAt(dirname_fd, path_buf2[0 .. path.text.len + bun.bytecode_extension.len], bun.default_allocator).asValue() orelse break :brk default_value; + const bytecode = bun.sys.File.toSourceAt(dirname_fd.unwrapValid() orelse bun.FD.cwd(), path_buf2[0 .. path.text.len + bun.bytecode_extension.len], bun.default_allocator).asValue() orelse break :brk default_value; if (bytecode.contents.len == 0) { break :brk default_value; } diff --git a/src/url.zig b/src/url.zig index bb2705ce6a..3ee221ebe6 100644 --- a/src/url.zig +++ b/src/url.zig @@ -10,7 +10,7 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; + const JSC = bun.JSC; // This is close to WHATWG URL, but we don't want the validation errors @@ -1056,7 +1056,7 @@ pub const FormData = struct { } comptime { - const jsFunctionFromMultipartData = JSC.toJSHostFunction(fromMultipartData); + const jsFunctionFromMultipartData = JSC.toJSHostFn(fromMultipartData); @export(&jsFunctionFromMultipartData, .{ .name = "FormData__jsFunctionFromMultipartData" }); } diff --git a/src/valkey/ValkeyCommand.zig b/src/valkey/ValkeyCommand.zig index 325bd9cf9e..13a3df96a6 100644 --- a/src/valkey/ValkeyCommand.zig +++ b/src/valkey/ValkeyCommand.zig @@ -4,7 +4,7 @@ meta: Meta = .{}, pub const Args = union(enum) { slices: []const Slice, - args: []const JSC.Node.BlobOrStringOrBuffer, + args: []const node.BlobOrStringOrBuffer, raw: []const []const u8, pub fn len(this: *const @This()) usize { @@ -158,3 +158,5 @@ const JSC = bun.JSC; const protocol = @import("valkey_protocol.zig"); const std = @import("std"); const Slice = JSC.ZigString.Slice; + +const node = bun.api.node; diff --git a/src/valkey/js_valkey.zig b/src/valkey/js_valkey.zig index 8938a24125..e9dcc69bcc 100644 --- a/src/valkey/js_valkey.zig +++ b/src/valkey/js_valkey.zig @@ -4,14 +4,14 @@ pub const JSValkeyClient = struct { globalObject: *JSC.JSGlobalObject, this_value: JSC.JSRef = JSC.JSRef.empty(), poll_ref: bun.Async.KeepAlive = .{}, - timer: JSC.BunTimer.EventLoopTimer = .{ + timer: Timer.EventLoopTimer = .{ .tag = .ValkeyConnectionTimeout, .next = .{ .sec = 0, .nsec = 0, }, }, - reconnect_timer: JSC.BunTimer.EventLoopTimer = .{ + reconnect_timer: Timer.EventLoopTimer = .{ .tag = .ValkeyConnectionReconnect, .next = .{ .sec = 0, @@ -250,7 +250,7 @@ pub const JSValkeyClient = struct { } /// Safely add a timer with proper reference counting and event loop keepalive - fn addTimer(this: *JSValkeyClient, timer: *JSC.BunTimer.EventLoopTimer, next_timeout_ms: u32) void { + fn addTimer(this: *JSValkeyClient, timer: *Timer.EventLoopTimer, next_timeout_ms: u32) void { this.ref(); defer this.deref(); @@ -274,7 +274,7 @@ pub const JSValkeyClient = struct { } /// Safely remove a timer with proper reference counting and event loop keepalive - fn removeTimer(this: *JSValkeyClient, timer: *JSC.BunTimer.EventLoopTimer) void { + fn removeTimer(this: *JSValkeyClient, timer: 
*Timer.EventLoopTimer) void { if (timer.state == .ACTIVE) { // Store VM reference to use later @@ -309,7 +309,7 @@ pub const JSValkeyClient = struct { this.timer.state = .CANCELLED; } - pub fn onConnectionTimeout(this: *JSValkeyClient) JSC.BunTimer.EventLoopTimer.Arm { + pub fn onConnectionTimeout(this: *JSValkeyClient) Timer.EventLoopTimer.Arm { debug("onConnectionTimeout", .{}); // Mark timer as fired @@ -342,7 +342,7 @@ pub const JSValkeyClient = struct { return .disarm; } - pub fn onReconnectTimer(this: *JSValkeyClient) JSC.BunTimer.EventLoopTimer.Arm { + pub fn onReconnectTimer(this: *JSValkeyClient) Timer.EventLoopTimer.Arm { debug("Reconnect timer fired, attempting to reconnect", .{}); // Mark timer as fired and store important values before doing any derefs @@ -869,3 +869,5 @@ const Socket = uws.AnySocket; const RedisError = protocol.RedisError; const Command = @import("ValkeyCommand.zig"); const BoringSSL = bun.BoringSSL; + +const Timer = bun.api.Timer; diff --git a/src/watcher/WindowsWatcher.zig b/src/watcher/WindowsWatcher.zig index 67e7ef7879..deefca8b38 100644 --- a/src/watcher/WindowsWatcher.zig +++ b/src/watcher/WindowsWatcher.zig @@ -43,7 +43,7 @@ const DirWatcher = struct { const err = w.kernel32.GetLastError(); log("failed to start watching directory: {s}", .{@tagName(err)}); return .{ .err = .{ - .errno = @intFromEnum(bun.C.SystemErrno.init(err) orelse bun.C.SystemErrno.EINVAL), + .errno = @intFromEnum(bun.sys.SystemErrno.init(err) orelse bun.sys.SystemErrno.EINVAL), .syscall = .watch, } }; } @@ -160,7 +160,7 @@ pub fn next(this: *WindowsWatcher, timeout: Timeout) bun.JSC.Maybe(?EventIterato } else { log("GetQueuedCompletionStatus failed: {s}", .{@tagName(err)}); return .{ .err = .{ - .errno = @intFromEnum(bun.C.SystemErrno.init(err) orelse bun.C.SystemErrno.EINVAL), + .errno = @intFromEnum(bun.sys.SystemErrno.init(err) orelse bun.sys.SystemErrno.EINVAL), .syscall = .watch, } }; } @@ -176,7 +176,7 @@ pub fn next(this: *WindowsWatcher, timeout: Timeout) bun.JSC.Maybe(?EventIterato // TODO close handles? log("shutdown notification in WindowsWatcher.next", .{}); return .{ .err = .{ - .errno = @intFromEnum(bun.C.SystemErrno.ESHUTDOWN), + .errno = @intFromEnum(bun.sys.SystemErrno.ESHUTDOWN), .syscall = .watch, } }; } @@ -184,7 +184,7 @@ pub fn next(this: *WindowsWatcher, timeout: Timeout) bun.JSC.Maybe(?EventIterato } else { log("GetQueuedCompletionStatus returned no overlapped event", .{}); return .{ .err = .{ - .errno = @truncate(@intFromEnum(bun.C.E.INVAL)), + .errno = @truncate(@intFromEnum(bun.sys.E.INVAL)), .syscall = .watch, } }; } diff --git a/src/which_npm_client.zig b/src/which_npm_client.zig index d6791dcec5..8bdb7d5c03 100644 --- a/src/which_npm_client.zig +++ b/src/which_npm_client.zig @@ -7,7 +7,6 @@ const strings = bun.strings; const MutableString = bun.MutableString; const stringZ = bun.stringZ; const default_allocator = bun.default_allocator; -const C = bun.C; const std = @import("std"); diff --git a/src/windows.zig b/src/windows.zig index e0767bad35..4d77f6f032 100644 --- a/src/windows.zig +++ b/src/windows.zig @@ -1,7 +1,15 @@ +//! Platform specific APIs for Windows +//! +//! If an API can be implemented on multiple platforms, +//! it does not belong in this namespace. 
const bun = @import("bun"); +const builtin = @import("builtin"); const Output = bun.Output; const windows = std.os.windows; +const w = std.os.windows; const win32 = windows; +const log = bun.sys.syslog; +const Maybe = bun.sys.Maybe; const c = bun.c; pub const ntdll = windows.ntdll; @@ -155,7 +163,7 @@ pub extern "kernel32" fn SetCurrentDirectoryW( pub const SetCurrentDirectory = SetCurrentDirectoryW; pub extern "ntdll" fn RtlNtStatusToDosError(win32.NTSTATUS) callconv(windows.WINAPI) Win32Error; -const SystemErrno = bun.C.SystemErrno; +const SystemErrno = bun.sys.SystemErrno; // This was originally copied from Zig's standard library /// Codes are from https://docs.microsoft.com/en-us/openspecs/windows_protocols/ms-erref/18d8fbe8-a967-4f1c-ae50-99ca8e491d2d @@ -3037,15 +3045,15 @@ pub extern "kernel32" fn SetFileInformationByHandle( bufferSize: DWORD, ) BOOL; -pub fn getLastErrno() bun.C.E { - return (bun.C.SystemErrno.init(bun.windows.kernel32.GetLastError()) orelse SystemErrno.EUNKNOWN).toE(); +pub fn getLastErrno() bun.sys.E { + return (bun.sys.SystemErrno.init(bun.windows.kernel32.GetLastError()) orelse SystemErrno.EUNKNOWN).toE(); } pub fn getLastError() anyerror { return bun.errnoToZigErr(getLastErrno()); } -pub fn translateNTStatusToErrno(err: win32.NTSTATUS) bun.C.E { +pub fn translateNTStatusToErrno(err: win32.NTSTATUS) bun.sys.E { return switch (err) { .SUCCESS => .SUCCESS, .ACCESS_DENIED => .PERM, @@ -3260,6 +3268,7 @@ comptime { if (Environment.isWindows) { @export(&Bun__UVSignalHandle__init, .{ .name = "Bun__UVSignalHandle__init" }); @export(&Bun__UVSignalHandle__close, .{ .name = "Bun__UVSignalHandle__close" }); + @export(&@"windows process.dlopen", .{ .name = "Bun__LoadLibraryBunString" }); } } @@ -3698,7 +3707,7 @@ pub fn isWatcherChild() bool { pub fn becomeWatcherManager(allocator: std.mem.Allocator) noreturn { // this process will be the parent of the child process that actually runs the script var procinfo: std.os.windows.PROCESS_INFORMATION = undefined; - bun.C.windows_enable_stdio_inheritance(); + windows_enable_stdio_inheritance(); const job = CreateJobObjectA(null, null) orelse Output.panic( "Could not create watcher Job Object: {s}", .{@tagName(std.os.windows.kernel32.GetLastError())}, @@ -3854,3 +3863,197 @@ pub fn spawnWatcherChild( bun.debugAssert(is_in_job != 0); _ = c.NtClose(procinfo.hThread); } + +/// Returns null on error. Use windows API to lookup the actual error. +/// The reason this function is in zig is so that we can use our own utf16-conversion functions. +/// +/// Using characters16() does not seem to always have the sentinel. or something else +/// broke when I just used it. Not sure. ... but this works! 
+fn @"windows process.dlopen"(str: *bun.String) callconv(.C) ?*anyopaque { + if (comptime !bun.Environment.isWindows) { + @compileError(unreachable); + } + + var buf: bun.WPathBuffer = undefined; + const data = switch (str.encoding()) { + .utf8 => bun.strings.convertUTF8toUTF16InBuffer(&buf, str.utf8()), + .utf16 => brk: { + @memcpy(buf[0..str.length()], str.utf16()); + break :brk buf[0..str.length()]; + }, + .latin1 => brk: { + bun.strings.copyU8IntoU16(&buf, str.latin1()); + break :brk buf[0..str.length()]; + }, + }; + buf[data.len] = 0; + const LOAD_WITH_ALTERED_SEARCH_PATH = 0x00000008; + return bun.windows.kernel32.LoadLibraryExW(buf[0..data.len :0].ptr, null, LOAD_WITH_ALTERED_SEARCH_PATH); +} + +pub extern fn windows_enable_stdio_inheritance() void; + +/// Extracted from standard library except this takes an open file descriptor +/// +/// NOTE: THE FILE MUST BE OPENED WITH ACCESS_MASK "DELETE" OR THIS WILL FAIL +pub fn deleteOpenedFile(fd: bun.FileDescriptor) Maybe(void) { + comptime bun.assert(builtin.target.os.version_range.windows.min.isAtLeast(.win10_rs5)); + var info = w.FILE_DISPOSITION_INFORMATION_EX{ + .Flags = FILE_DISPOSITION_DELETE | + FILE_DISPOSITION_POSIX_SEMANTICS | + FILE_DISPOSITION_IGNORE_READONLY_ATTRIBUTE, + }; + + var io: w.IO_STATUS_BLOCK = undefined; + const rc = w.ntdll.NtSetInformationFile( + fd.cast(), + &io, + &info, + @sizeOf(w.FILE_DISPOSITION_INFORMATION_EX), + .FileDispositionInformationEx, + ); + + log("deleteOpenedFile({}) = {s}", .{ fd, @tagName(rc) }); + + return if (rc == .SUCCESS) + Maybe(void).success + else + Maybe(void).errno(rc, .NtSetInformationFile); +} + +/// With an open file source_fd, move it into the directory new_dir_fd with the name new_path_w. +/// Does not close the file descriptor. +/// +/// For this to succeed +/// - source_fd must have been opened with access_mask=w.DELETE +/// - new_path_w must be the name of a file. it cannot be a path relative to new_dir_fd. see moveOpenedFileAtLoose +pub fn moveOpenedFileAt( + src_fd: bun.FileDescriptor, + new_dir_fd: bun.FileDescriptor, + new_file_name: []const u16, + replace_if_exists: bool, +) Maybe(void) { + // FILE_RENAME_INFORMATION_EX and FILE_RENAME_POSIX_SEMANTICS require >= win10_rs1, + // but FILE_RENAME_IGNORE_READONLY_ATTRIBUTE requires >= win10_rs5. We check >= rs5 here + // so that we only use POSIX_SEMANTICS when we know IGNORE_READONLY_ATTRIBUTE will also be + // supported in order to avoid either (1) using a redundant call that we can know in advance will return + // STATUS_NOT_SUPPORTED or (2) only setting IGNORE_READONLY_ATTRIBUTE when >= rs5 + // and therefore having different behavior when the Windows version is >= rs1 but < rs5. 
+ comptime bun.assert(builtin.target.os.version_range.windows.min.isAtLeast(.win10_rs5)); + + if (bun.Environment.allow_assert) { + bun.assert(std.mem.indexOfScalar(u16, new_file_name, '/') == null); // Call moveOpenedFileAtLoose + } + + const struct_buf_len = @sizeOf(w.FILE_RENAME_INFORMATION_EX) + (bun.MAX_PATH_BYTES - 1); + var rename_info_buf: [struct_buf_len]u8 align(@alignOf(w.FILE_RENAME_INFORMATION_EX)) = undefined; + + const struct_len = @sizeOf(w.FILE_RENAME_INFORMATION_EX) - 1 + new_file_name.len * 2; + if (struct_len > struct_buf_len) return Maybe(void).errno(bun.sys.E.NAMETOOLONG, .NtSetInformationFile); + + const rename_info = @as(*w.FILE_RENAME_INFORMATION_EX, @ptrCast(&rename_info_buf)); + var io_status_block: w.IO_STATUS_BLOCK = undefined; + + var flags: w.ULONG = w.FILE_RENAME_POSIX_SEMANTICS | w.FILE_RENAME_IGNORE_READONLY_ATTRIBUTE; + if (replace_if_exists) flags |= w.FILE_RENAME_REPLACE_IF_EXISTS; + rename_info.* = .{ + .Flags = flags, + .RootDirectory = if (std.fs.path.isAbsoluteWindowsWTF16(new_file_name)) null else new_dir_fd.cast(), + .FileNameLength = @intCast(new_file_name.len * 2), // already checked error.NameTooLong + .FileName = undefined, + }; + @memcpy(@as([*]u16, &rename_info.FileName)[0..new_file_name.len], new_file_name); + const rc = w.ntdll.NtSetInformationFile( + src_fd.cast(), + &io_status_block, + rename_info, + @intCast(struct_len), // already checked for error.NameTooLong + .FileRenameInformationEx, + ); + log("moveOpenedFileAt({} ->> {} '{}', {s}) = {s}", .{ src_fd, new_dir_fd, bun.fmt.utf16(new_file_name), if (replace_if_exists) "replace_if_exists" else "no flag", @tagName(rc) }); + + if (bun.Environment.isDebug) { + if (rc == .ACCESS_DENIED) { + bun.Output.debugWarn("moveOpenedFileAt was called on a file descriptor without access_mask=w.DELETE", .{}); + } + } + + return if (rc == .SUCCESS) + Maybe(void).success + else + Maybe(void).errno(rc, .NtSetInformationFile); +} + +/// Same as moveOpenedFileAt but allows new_path to be a path relative to new_dir_fd. +/// +/// Aka: moveOpenedFileAtLoose(fd, dir, ".\\a\\relative\\not-normalized-path.txt", false); +pub fn moveOpenedFileAtLoose( + src_fd: bun.FileDescriptor, + new_dir_fd: bun.FileDescriptor, + new_path: []const u16, + replace_if_exists: bool, +) Maybe(void) { + bun.assert(std.mem.indexOfScalar(u16, new_path, '/') == null); // Call bun.strings.toWPathNormalized first + + const without_leading_dot_slash = if (new_path.len >= 2 and new_path[0] == '.' and new_path[1] == '\\') + new_path[2..] 
+ else + new_path; + + if (std.mem.lastIndexOfScalar(u16, new_path, '\\')) |last_slash| { + const dirname = new_path[0..last_slash]; + const fd = switch (bun.sys.openDirAtWindows(new_dir_fd, dirname, .{ .can_rename_or_delete = true, .iterable = false })) { + .err => |e| return .{ .err = e }, + .result => |fd| fd, + }; + defer fd.close(); + + const basename = new_path[last_slash + 1 ..]; + return moveOpenedFileAt(src_fd, fd, basename, replace_if_exists); + } + + // easy mode + return moveOpenedFileAt(src_fd, new_dir_fd, without_leading_dot_slash, replace_if_exists); +} + +/// Derived from std.os.windows.renameAtW +/// Allows more errors +pub fn renameAtW( + old_dir_fd: bun.FileDescriptor, + old_path_w: []const u16, + new_dir_fd: bun.FileDescriptor, + new_path_w: []const u16, + replace_if_exists: bool, +) Maybe(void) { + const src_fd = brk: { + switch (bun.sys.openFileAtWindows( + old_dir_fd, + old_path_w, + .{ + .access_mask = w.SYNCHRONIZE | w.GENERIC_WRITE | w.DELETE | w.FILE_TRAVERSE, + .disposition = w.FILE_OPEN, + .options = w.FILE_SYNCHRONOUS_IO_NONALERT | w.FILE_OPEN_REPARSE_POINT, + }, + )) { + .err => { + // retry, without FILE_TRAVERSE flag + switch (bun.sys.openFileAtWindows( + old_dir_fd, + old_path_w, + .{ + .access_mask = w.SYNCHRONIZE | w.GENERIC_WRITE | w.DELETE, + .disposition = w.FILE_OPEN, + .options = w.FILE_SYNCHRONOUS_IO_NONALERT | w.FILE_OPEN_REPARSE_POINT, + }, + )) { + .err => |err2| return .{ .err = err2 }, + .result => |fd| break :brk fd, + } + }, + .result => |fd| break :brk fd, + } + }; + defer src_fd.close(); + + return moveOpenedFileAt(src_fd, new_dir_fd, new_path_w, replace_if_exists); +} diff --git a/src/workaround_missing_symbols.zig b/src/workaround_missing_symbols.zig new file mode 100644 index 0000000000..7d8ef7ca0d --- /dev/null +++ b/src/workaround_missing_symbols.zig @@ -0,0 +1,126 @@ +const std = @import("std"); + +pub const linux = struct { + + // On Linux, bun overrides the libc symbols for various functions. + // This is to compensate for older glibc versions.
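As an aside on the comment above (an illustrative sketch, not part of this patch): the raw std.os.linux syscall wrappers these overrides call do not touch libc's errno at all; they return a usize whose bit pattern is the negated errno whenever it falls in (-4096, 0). The standalone snippet below, which assumes a recent Zig standard library and uses a made-up path, shows the raw convention that simulateLibcErrno() translates back into the libc one (return -1 and store the code in errno).

const std = @import("std");

pub fn main() void {
    var st: std.os.linux.Stat = undefined;
    // A path that should not exist; the raw wrapper reports the failure through
    // the return value rather than through errno.
    const rc = std.os.linux.fstatat(std.os.linux.AT.FDCWD, "/definitely/missing", &st, 0);
    const signed: isize = @bitCast(rc);
    // Prints -2 (-ENOENT). A libc-compatible wrapper, like the exported stat()
    // family below, would instead return -1 and set errno to 2.
    std.debug.print("raw fstatat return: {d}\n", .{signed});
}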
+ + fn simulateLibcErrno(rc: usize) c_int { + const signed: isize = @bitCast(rc); + const int: c_int = @intCast(if (signed > -4096 and signed < 0) -signed else 0); + std.c._errno().* = int; + return if (signed > -4096 and signed < 0) -1 else int; + } + + pub export fn stat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { + // https://git.musl-libc.org/cgit/musl/tree/src/stat/stat.c + const rc = std.os.linux.fstatat(std.os.linux.AT.FDCWD, path, buf, 0); + return simulateLibcErrno(rc); + } + + pub const stat64 = stat; + pub const lstat64 = lstat; + pub const fstat64 = fstat; + pub const fstatat64 = fstatat; + + pub export fn lstat(path: [*:0]const u8, buf: *std.os.linux.Stat) c_int { + // https://git.musl-libc.org/cgit/musl/tree/src/stat/lstat.c + const rc = std.os.linux.fstatat(std.os.linux.AT.FDCWD, path, buf, std.os.linux.AT.SYMLINK_NOFOLLOW); + return simulateLibcErrno(rc); + } + + pub export fn fstat(fd: c_int, buf: *std.os.linux.Stat) c_int { + const rc = std.os.linux.fstat(fd, buf); + return simulateLibcErrno(rc); + } + + pub export fn fstatat(dirfd: i32, path: [*:0]const u8, buf: *std.os.linux.Stat, flags: u32) c_int { + const rc = std.os.linux.fstatat(dirfd, path, buf, flags); + return simulateLibcErrno(rc); + } + + pub export fn statx(dirfd: i32, path: [*:0]const u8, flags: u32, mask: u32, buf: *std.os.linux.Statx) c_int { + const rc = std.os.linux.statx(dirfd, path, flags, mask, buf); + return simulateLibcErrno(rc); + } + + pub const memmem = bun.c.memmem; + + comptime { + _ = stat; + _ = stat64; + _ = lstat; + _ = lstat64; + _ = fstat; + _ = fstat64; + _ = fstatat; + _ = statx; + @export(&stat, .{ .name = "stat64" }); + @export(&lstat, .{ .name = "lstat64" }); + @export(&fstat, .{ .name = "fstat64" }); + @export(&fstatat, .{ .name = "fstatat64" }); + } +}; +pub const darwin = struct { + pub const memmem = bun.c.memmem; + + // The symbol name depends on the arch. 
+ + pub const lstat = blk: { + const T = *const fn (?[*:0]const u8, ?*bun.Stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = if (bun.Environment.isAarch64) "lstat" else "lstat64" }); + }; + pub const fstat = blk: { + const T = *const fn (i32, ?*bun.Stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = if (bun.Environment.isAarch64) "fstat" else "fstat64" }); + }; + pub const stat = blk: { + const T = *const fn (?[*:0]const u8, ?*bun.Stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = if (bun.Environment.isAarch64) "stat" else "stat64" }); + }; +}; +pub const windows = struct { + /// Windows doesn't have memmem, so we need to implement it + /// This is used in src/string_immutable.zig + pub export fn memmem(haystack: ?[*]const u8, haystacklen: usize, needle: ?[*]const u8, needlelen: usize) ?[*]const u8 { + // Handle null pointers + if (haystack == null or needle == null) return null; + + // Handle empty needle case + if (needlelen == 0) return haystack; + + // Handle case where needle is longer than haystack + if (needlelen > haystacklen) return null; + + const hay = haystack.?[0..haystacklen]; + const nee = needle.?[0..needlelen]; + + const i = std.mem.indexOf(u8, hay, nee) orelse return null; + return hay.ptr + i; + } + + /// lstat is implemented in workaround-missing-symbols.cpp + pub const lstat = blk: { + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = "lstat64" }); + }; + /// fstat is implemented in workaround-missing-symbols.cpp + pub const fstat = blk: { + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = "fstat64" }); + }; + /// stat is implemented in workaround-missing-symbols.cpp + pub const stat = blk: { + const T = *const fn ([*c]const u8, [*c]std.c.Stat) callconv(.C) c_int; + break :blk @extern(T, .{ .name = "stat64" }); + }; +}; + +pub const current = switch (bun.Environment.os) { + .linux => linux, + .windows => windows, + .mac => darwin, + else => struct {}, +}; + +const bun = @import("bun"); diff --git a/test/internal/ban-words.test.ts b/test/internal/ban-words.test.ts index 818105959b..ddc0702493 100644 --- a/test/internal/ban-words.test.ts +++ b/test/internal/ban-words.test.ts @@ -29,14 +29,18 @@ const words: Record "== alloc.ptr": { reason: "The std.mem.Allocator context pointer can be undefined, which makes this comparison undefined behavior" }, "!= alloc.ptr": { reason: "The std.mem.Allocator context pointer can be undefined, which makes this comparison undefined behavior" }, - [String.raw`: [a-zA-Z0-9_\.\*\?\[\]\(\)]+ = undefined,`]: { reason: "Do not default a struct field to undefined", limit: 242, regex: true }, - "usingnamespace": { reason: "Zig deprecates this, and will not support it in incremental compilation.", limit: 19 }, + [String.raw`: [a-zA-Z0-9_\.\*\?\[\]\(\)]+ = undefined,`]: { reason: "Do not default a struct field to undefined", limit: 241, regex: true }, + "usingnamespace": { reason: "Zig 0.15 will remove `usingnamespace`" }, "std.fs.Dir": { reason: "Prefer bun.sys + bun.FD instead of std.fs", limit: 180 }, "std.fs.cwd": { reason: "Prefer bun.FD.cwd()", limit: 103 }, - "std.fs.File": { reason: "Prefer bun.sys + bun.FD instead of std.fs", limit: 71 }, + "std.fs.File": { reason: "Prefer bun.sys + bun.FD instead of std.fs", limit: 64 }, ".stdFile()": { reason: "Prefer bun.sys + bun.FD instead of std.fs.File. 
Zig hides 'errno' when Bun wants to match libuv", limit: 18 }, ".stdDir()": { reason: "Prefer bun.sys + bun.FD instead of std.fs.File. Zig hides 'errno' when Bun wants to match libuv", limit: 48 }, + + ".arguments_old(": { reason: "Please migrate to .argumentsAsArray() or another argument API", limit: 291 }, + + "// autofix": { reason: "Evaluate if this variable should be deleted entirely or explicitly discarded.", limit: 176 }, }; const words_keys = [...Object.keys(words)]; diff --git a/test/js/node/buffer.test.js b/test/js/node/buffer.test.js index 54ce5a80b0..43d2ca85ac 100644 --- a/test/js/node/buffer.test.js +++ b/test/js/node/buffer.test.js @@ -457,6 +457,7 @@ for (let withOverridenBufferWrite of [false, true]) { const c = Buffer.from([0, 0, 0, 0, 0]); expect(c.length).toBe(5); expect(c.write("あいうえお", encoding)).toBe(4); + console.log(c.toString(encoding), { encoding } ); expect(c).toStrictEqual(Buffer.from([0x42, 0x30, 0x44, 0x30, 0x00])); });