Fixes ENG-21287
Build times for an incremental rebuild, measured with `bun run build && echo '//' >> src/main.zig && time bun run build`:
|Platform|0.14.1|0.15.2|Speedup|
|-|-|-|-|
|macos debug asan|126.90s|106.27s|1.19x|
|macos debug noasan|60.62s|50.85s|1.19x|
|linux debug asan|292.77s|241.45s|1.21x|
|linux debug noasan|146.58s|130.94s|1.12x|
|linux debug use_llvm=false|n/a|78.27s|1.87x|
|windows debug asan|177.13s|142.55s|1.24x|
Runtime performance:
- `next build` memory usage may have gone up by ~5%; otherwise performance seems unchanged.
- Some code that uses writers may have gotten slower, in particular one counting writer and a few unbuffered writers that now go through the vtable-based writer interface (a sketch follows this list).
- File size reduced by 800 KB (from 100.2 MB to 99.4 MB).
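To make the writer note concrete, here is a minimal sketch (not code from this change) contrasting the old comptime-generic writer pattern with the non-generic `*std.Io.Writer` interface. `std.Io.Writer.fixed` and the `end` field are assumptions about the Zig 0.15 standard library, not anything taken from bun.

```
const std = @import("std");

// Old pattern: each call site instantiates a specialized copy of this function
// for its concrete writer type, so writes are direct (often inlined) calls.
fn emitGeneric(writer: anytype) !void {
    try writer.writeAll("hello\n");
}

// New pattern: one non-generic function. A writer with no buffer (or a
// counting wrapper) reaches the concrete sink through the vtable on every
// write, which is the overhead mentioned above.
fn emitViaVtable(w: *std.Io.Writer) !void {
    try w.writeAll("hello\n");
}

test "emitViaVtable into a fixed buffer" {
    var buf: [16]u8 = undefined;
    var w = std.Io.Writer.fixed(&buf);
    try emitViaVtable(&w);
    try std.testing.expectEqualStrings("hello\n", buf[0..w.end]);
}
```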
Improvements:
- The `@export` hack is no longer needed for watch mode.
- The native x86_64 backend makes Linux builds faster. To use it, set `use_llvm` to false and `no_link_obj` to false, and set `ASAN_OPTIONS=detect_leaks=0`, otherwise the output is spammed with tens of thousands of lines of debug info errors. Debugging may require the Zig fork of lldb. (A generic sketch of this kind of build.zig toggle follows this list.)
- `zig test-obj`, which we will be able to use for Zig unit tests.
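For context on what such a toggle usually looks like, here is a generic sketch; this is not bun's actual build.zig, and the option name, default, and wiring are assumptions:

```
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // -Duse_llvm=false selects the compiler's self-hosted backend instead of LLVM.
    const use_llvm = b.option(bool, "use_llvm", "Use the LLVM backend (default: true)") orelse true;

    const exe = b.addExecutable(.{
        .name = "example",
        .root_module = b.createModule(.{
            .root_source_file = b.path("src/main.zig"),
            .target = target,
            .optimize = optimize,
        }),
    });
    exe.use_llvm = use_llvm;

    b.installArtifact(exe);
}
```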
Still an issue:
- false 'dependency loop' errors remain in watch mode
- watch mode crashes observed
Follow-up:
- [ ] search for `comptime Writer: type` and `comptime W: type` and remove the remaining instances
- [ ] remove format_mode in our zig fork
- [ ] remove deprecated.zig autoFormatLabelFallback
- [ ] remove deprecated.zig autoFormatLabel
- [ ] remove deprecated.BufferedWriter and BufferedReader
- [ ] remove override_no_export_cpp_apis as it is no longer needed
- [ ] css `Parser(W)` -> `Parser`, and remove all the `comptime Writer: type` params (see the sketch after this list)
- [ ] remove deprecated writer fully
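The writer-related follow-ups above all reduce to the same shape of change; a hypothetical `Printer` (not the actual css code) shows the before and after:

```
const std = @import("std");

// Before: generic over the destination writer type, so call sites read
// Printer(SomeWriter) and every writer type gets its own instantiation.
fn Printer(comptime Writer: type) type {
    return struct {
        dest: Writer,

        pub fn writeStr(p: *@This(), s: []const u8) !void {
            try p.dest.writeAll(s);
        }
    };
}

// After: one concrete type holding a *std.Io.Writer, so the
// `comptime Writer: type` parameter disappears from the API.
const PrinterNonGeneric = struct {
    dest: *std.Io.Writer,

    pub fn writeStr(p: *PrinterNonGeneric, s: []const u8) !void {
        try p.dest.writeAll(s);
    }
};
```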
Files with net added lines:
```
649 src/deprecated.zig
167 scripts/pack-codegen-for-zig-team.ts
54 scripts/cleartrace-impl.js
46 scripts/cleartrace.ts
43 src/windows.zig
18 src/fs.zig
17 src/bun.js/ConsoleObject.zig
16 src/output.zig
12 src/bun.js/test/debug.zig
12 src/bun.js/node/node_fs.zig
8 src/env_loader.zig
7 src/css/printer.zig
7 src/cli/init_command.zig
7 src/bun.js/node.zig
6 src/string/escapeRegExp.zig
6 src/install/PnpmMatcher.zig
5 src/bun.js/webcore/Blob.zig
4 src/crash_handler.zig
4 src/bun.zig
3 src/install/lockfile/bun.lock.zig
3 src/cli/update_interactive_command.zig
3 src/cli/pack_command.zig
3 build.zig
2 src/Progress.zig
2 src/install/lockfile/lockfile_json_stringify_for_debugging.zig
2 src/css/small_list.zig
2 src/bun.js/webcore/prompt.zig
1 test/internal/ban-words.test.ts
1 test/internal/ban-limits.json
1 src/watcher/WatcherTrace.zig
1 src/transpiler.zig
1 src/shell/builtin/cp.zig
1 src/js_printer.zig
1 src/io/PipeReader.zig
1 src/install/bin.zig
1 src/css/selectors/selector.zig
1 src/cli/run_command.zig
1 src/bun.js/RuntimeTranspilerStore.zig
1 src/bun.js/bindings/JSRef.zig
1 src/bake/DevServer.zig
```
Files with net removed lines:
```
-1 src/test/recover.zig
-1 src/sql/postgres/SocketMonitor.zig
-1 src/sql/mysql/MySQLRequestQueue.zig
-1 src/sourcemap/CodeCoverage.zig
-1 src/css/values/color_js.zig
-1 src/compile_target.zig
-1 src/bundler/linker_context/convertStmtsForChunk.zig
-1 src/bundler/bundle_v2.zig
-1 src/bun.js/webcore/blob/read_file.zig
-1 src/ast/base.zig
-2 src/sql/postgres/protocol/ArrayList.zig
-2 src/shell/builtin/mkdir.zig
-2 src/install/PackageManager/patchPackage.zig
-2 src/install/PackageManager/PackageManagerDirectories.zig
-2 src/fmt.zig
-2 src/css/declaration.zig
-2 src/css/css_parser.zig
-2 src/collections/baby_list.zig
-2 src/bun.js/bindings/ZigStackFrame.zig
-2 src/ast/E.zig
-3 src/StandaloneModuleGraph.zig
-3 src/deps/picohttp.zig
-3 src/deps/libuv.zig
-3 src/btjs.zig
-4 src/threading/Futex.zig
-4 src/shell/builtin/touch.zig
-4 src/meta.zig
-4 src/install/lockfile.zig
-4 src/css/selectors/parser.zig
-5 src/shell/interpreter.zig
-5 src/css/error.zig
-5 src/bun.js/web_worker.zig
-5 src/bun.js.zig
-6 src/cli/test_command.zig
-6 src/bun.js/VirtualMachine.zig
-6 src/bun.js/uuid.zig
-6 src/bun.js/bindings/JSValue.zig
-9 src/bun.js/test/pretty_format.zig
-9 src/bun.js/api/BunObject.zig
-14 src/install/install_binding.zig
-14 src/fd.zig
-14 src/bun.js/node/path.zig
-14 scripts/pack-codegen-for-zig-team.sh
-17 src/bun.js/test/diff_format.zig
```
Both lists were generated with `git diff --numstat origin/main...HEAD | awk '{ print ($1-$2)"\t"$3 }' | sort -rn`.
---------
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Meghan Denny <meghan@bun.com>
Co-authored-by: tayor.fish <contact@taylor.fish>
532 lines · 18 KiB · Zig
const OutputFile = @This();

// Instead of keeping files in-memory, we:
// 1. Write directly to disk
// 2. (Optional) move the file to the destination
// This saves us from allocating a buffer

loader: Loader,
input_loader: Loader = .js,
src_path: Fs.Path,
value: Value,
size: usize = 0,
size_without_sourcemap: usize = 0,
hash: u64 = 0,
is_executable: bool = false,
source_map_index: u32 = std.math.maxInt(u32),
bytecode_index: u32 = std.math.maxInt(u32),
output_kind: jsc.API.BuildArtifact.OutputKind,
/// Relative
dest_path: []const u8 = "",
side: ?bun.bake.Side,
/// This is only set for the JS bundle, and not files associated with an
/// entrypoint like sourcemaps and bytecode
entry_point_index: ?u32,
referenced_css_chunks: []const Index = &.{},
source_index: Index.Optional = .none,
bake_extra: BakeExtra = .{},

pub const zero_value = OutputFile{
    .loader = .file,
    .src_path = Fs.Path.init(""),
    .value = .noop,
    .output_kind = .chunk,
    .side = null,
    .entry_point_index = null,
};

pub const BakeExtra = struct {
    is_route: bool = false,
    fully_static: bool = false,
    bake_is_runtime: bool = false,
};

pub const Index = bun.GenericIndex(u32, OutputFile);

pub fn deinit(this: *OutputFile) void {
    this.value.deinit();

    bun.default_allocator.free(this.src_path.text);
    bun.default_allocator.free(this.dest_path);
    bun.default_allocator.free(this.referenced_css_chunks);
}

// Depending on:
// - The target
// - The number of open file handles
// - Whether or not a file of the same name exists
// We may use a different system call
pub const FileOperation = struct {
    pathname: string,
    fd: FileDescriptorType = bun.invalid_fd,
    dir: FileDescriptorType = bun.invalid_fd,
    is_tmpdir: bool = false,
    is_outdir: bool = false,
    close_handle_on_complete: bool = false,
    autowatch: bool = true,

    pub fn fromFile(fd: bun.FD, pathname: string) FileOperation {
        return .{
            .fd = fd,
            .pathname = pathname,
        };
    }

    pub fn getPathname(file: *const FileOperation) string {
        if (file.is_tmpdir) {
            return resolve_path.joinAbs(@TypeOf(Fs.FileSystem.instance.fs).tmpdir_path, .auto, file.pathname);
        } else {
            return file.pathname;
        }
    }
};

pub const Kind = enum {
    move,
    copy,
    noop,
    buffer,
    pending,
    saved,
};

// TODO: document how and why all variants of this union(enum) are used,
// specifically .move and .copy; the new bundler has to load files in memory
// in order to hash them, so i think it uses .buffer for those
pub const Value = union(Kind) {
    move: FileOperation,
    copy: FileOperation,
    noop: u0,
    buffer: struct {
        allocator: std.mem.Allocator,
        bytes: []const u8,
    },
    pending: resolver.Result,
    saved: SavedFile,

    pub fn deinit(this: *Value) void {
        switch (this.*) {
            .buffer => |buf| {
                buf.allocator.free(buf.bytes);
            },
            .saved => {},
            .move => {},
            .copy => {},
            .noop => {},
            .pending => {},
        }
    }

    pub fn asSlice(v: Value) []const u8 {
        return switch (v) {
            .buffer => |buf| buf.bytes,
            else => "",
        };
    }

    pub fn toBunString(v: Value) bun.String {
        return switch (v) {
            .noop => bun.String.empty,
            .buffer => |buf| {
                // Use ExternalStringImpl to avoid cloning the string, at
                // the cost of allocating space to remember the allocator.
                const FreeContext = struct {
                    allocator: std.mem.Allocator,

                    fn onFree(ctx: *@This(), buffer: *anyopaque, len: u32) callconv(.c) void {
                        ctx.allocator.free(@as([*]u8, @ptrCast(buffer))[0..len]);
                        bun.destroy(ctx);
                    }
                };
                return bun.String.createExternal(
                    *FreeContext,
                    buf.bytes,
                    true,
                    bun.new(FreeContext, .{ .allocator = buf.allocator }),
                    FreeContext.onFree,
                );
            },
            .pending => unreachable,
            else => |tag| bun.todoPanic(@src(), "handle .{s}", .{@tagName(tag)}),
        };
    }
};

pub const SavedFile = struct {
    pub fn toJS(
        globalThis: *jsc.JSGlobalObject,
        path: []const u8,
        byte_size: usize,
    ) jsc.JSValue {
        const mime_type = globalThis.bunVM().mimeType(path);
        const store = jsc.WebCore.Blob.Store.initFile(
            jsc.Node.PathOrFileDescriptor{
                .path = jsc.Node.PathLike{
                    .string = bun.PathString.init(path),
                },
            },
            mime_type,
            bun.default_allocator,
        ) catch unreachable;

        var blob = bun.default_allocator.create(jsc.WebCore.Blob) catch unreachable;
        blob.* = jsc.WebCore.Blob.initWithStore(store, globalThis);
        if (mime_type) |mime| {
            blob.content_type = mime.value;
        }
        blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(byte_size));
        blob.allocator = bun.default_allocator;
        return blob.toJS(globalThis);
    }
};

pub fn initPending(loader: Loader, pending: resolver.Result) OutputFile {
    return .{
        .loader = loader,
        .src_path = pending.pathConst().?.*,
        .size = 0,
        .value = .{ .pending = pending },
    };
}

pub fn initFile(file: std.fs.File, pathname: string, size: usize) OutputFile {
    return .{
        .loader = .file,
        .src_path = Fs.Path.init(pathname),
        .size = size,
        .value = .{ .copy = FileOperation.fromFile(file.handle, pathname) },
    };
}

pub fn initFileWithDir(file: std.fs.File, pathname: string, size: usize, dir: std.fs.Dir) OutputFile {
    var res = initFile(file, pathname, size);
    res.value.copy.dir_handle = .fromStdDir(dir);
    return res;
}

pub const Options = struct {
    loader: Loader,
    input_loader: Loader,
    hash: ?u64 = null,
    source_map_index: ?u32 = null,
    bytecode_index: ?u32 = null,
    output_path: string,
    source_index: Index.Optional = .none,
    size: ?usize = null,
    input_path: []const u8 = "",
    display_size: u32 = 0,
    output_kind: jsc.API.BuildArtifact.OutputKind,
    is_executable: bool,
    data: union(enum) {
        buffer: struct {
            allocator: std.mem.Allocator,
            data: []const u8,
        },
        file: struct {
            file: std.fs.File,
            size: usize,
            dir: std.fs.Dir,
        },
        saved: usize,
    },
    side: ?bun.bake.Side,
    entry_point_index: ?u32,
    referenced_css_chunks: []const Index = &.{},
    bake_extra: BakeExtra = .{},
};

pub fn init(options: Options) OutputFile {
    return .{
        .loader = options.loader,
        .input_loader = options.input_loader,
        .src_path = Fs.Path.init(options.input_path),
        .dest_path = options.output_path,
        .source_index = options.source_index,
        .size = options.size orelse switch (options.data) {
            .buffer => |buf| buf.data.len,
            .file => |file| file.size,
            .saved => 0,
        },
        .size_without_sourcemap = options.display_size,
        .hash = options.hash orelse 0,
        .output_kind = options.output_kind,
        .bytecode_index = options.bytecode_index orelse std.math.maxInt(u32),
        .source_map_index = options.source_map_index orelse std.math.maxInt(u32),
        .is_executable = options.is_executable,
        .value = switch (options.data) {
            .buffer => |buffer| Value{ .buffer = .{ .allocator = buffer.allocator, .bytes = buffer.data } },
            .file => |file| Value{
                .copy = brk: {
                    var op = FileOperation.fromFile(.fromStdFile(file.file), options.output_path);
                    op.dir = .fromStdDir(file.dir);
                    break :brk op;
                },
            },
            .saved => Value{ .saved = .{} },
        },
        .side = options.side,
        .entry_point_index = options.entry_point_index,
        .referenced_css_chunks = options.referenced_css_chunks,
        .bake_extra = options.bake_extra,
    };
}

pub fn writeToDisk(f: OutputFile, root_dir: std.fs.Dir, root_dir_path: []const u8) !void {
    switch (f.value) {
        .noop => {},
        .saved => {
            // already written to disk
        },
        .buffer => |value| {
            var rel_path = f.dest_path;
            if (f.dest_path.len > root_dir_path.len) {
                rel_path = resolve_path.relative(root_dir_path, f.dest_path);
                if (std.fs.path.dirname(rel_path)) |parent| {
                    if (parent.len > root_dir_path.len) {
                        try root_dir.makePath(parent);
                    }
                }
            }

            var path_buf: bun.PathBuffer = undefined;
            _ = try jsc.Node.fs.NodeFS.writeFileWithPathBuffer(&path_buf, .{
                .data = .{ .buffer = .{
                    .buffer = .{
                        .ptr = @constCast(value.bytes.ptr),
                        .len = value.bytes.len,
                        .byte_len = value.bytes.len,
                    },
                } },
                .encoding = .buffer,
                .mode = if (f.is_executable) 0o755 else 0o644,
                .dirfd = .fromStdDir(root_dir),
                .file = .{ .path = .{
                    .string = bun.PathString.init(rel_path),
                } },
            }).unwrap();
        },
        .move => |value| {
            try f.moveTo(root_dir_path, value.pathname, .fromStdDir(root_dir));
        },
        .copy => |value| {
            try f.copyTo(root_dir_path, value.pathname, .fromStdDir(root_dir));
        },
        .pending => unreachable,
    }
}

pub fn moveTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: FileDescriptorType) !void {
    try bun.sys.moveFileZ(file.value.move.dir, bun.sliceTo(&(try std.posix.toPosixPath(file.value.move.getPathname())), 0), dir, bun.sliceTo(&(try std.posix.toPosixPath(rel_path)), 0));
}

pub fn copyTo(file: *const OutputFile, _: string, rel_path: []const u8, dir: FileDescriptorType) !void {
    const fd_out = bun.FD.fromStdFile(try dir.stdDir().createFile(rel_path, .{}));
    var do_close = false;
    const fd_in = bun.FD.fromStdFile(try std.fs.cwd().openFile(file.src_path.text, .{ .mode = .read_only }));

    if (Environment.isWindows) {
        do_close = Fs.FileSystem.instance.fs.needToCloseFiles();

        // use paths instead of bun.getFdPathW()
        @panic("TODO windows");
    }

    defer {
        if (do_close) {
            fd_out.close();
            fd_in.close();
        }
    }

    try bun.copyFile(fd_in, fd_out).unwrap();
}

pub fn toJS(
    this: *OutputFile,
    owned_pathname: ?[]const u8,
    globalObject: *jsc.JSGlobalObject,
) bun.jsc.JSValue {
    return switch (this.value) {
        .move, .pending => @panic("Unexpected pending output file"),
        .noop => .js_undefined,
        .copy => |copy| brk: {
            const file_blob = jsc.WebCore.Blob.Store.initFile(
                if (copy.fd.isValid())
                    jsc.Node.PathOrFileDescriptor{
                        .fd = copy.fd,
                    }
                else
                    jsc.Node.PathOrFileDescriptor{
                        .path = jsc.Node.PathLike{ .string = bun.PathString.init(globalObject.allocator().dupe(u8, copy.pathname) catch unreachable) },
                    },
                this.loader.toMimeType(&.{owned_pathname orelse ""}),
                globalObject.allocator(),
            ) catch |err| {
                Output.panic("error: Unable to create file blob: \"{s}\"", .{@errorName(err)});
            };

            var build_output = bun.new(jsc.API.BuildArtifact, .{
                .blob = jsc.WebCore.Blob.initWithStore(file_blob, globalObject),
                .hash = this.hash,
                .loader = this.input_loader,
                .output_kind = this.output_kind,
                .path = bun.default_allocator.dupe(u8, copy.pathname) catch @panic("Failed to allocate path"),
            });

            this.value = .{
                .buffer = .{
                    .allocator = bun.default_allocator,
                    .bytes = &.{},
                },
            };

            break :brk build_output.toJS(globalObject);
        },
        .saved => brk: {
            var build_output = bun.default_allocator.create(jsc.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            const path_to_use = owned_pathname orelse this.src_path.text;

            const file_blob = jsc.WebCore.Blob.Store.initFile(
                jsc.Node.PathOrFileDescriptor{
                    .path = jsc.Node.PathLike{ .string = bun.PathString.init(owned_pathname orelse (bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable)) },
                },
                this.loader.toMimeType(&.{owned_pathname orelse ""}),
                globalObject.allocator(),
            ) catch |err| {
                Output.panic("error: Unable to create file blob: \"{s}\"", .{@errorName(err)});
            };

            this.value = .{
                .buffer = .{
                    .allocator = bun.default_allocator,
                    .bytes = &.{},
                },
            };

            build_output.* = jsc.API.BuildArtifact{
                .blob = jsc.WebCore.Blob.initWithStore(file_blob, globalObject),
                .hash = this.hash,
                .loader = this.input_loader,
                .output_kind = this.output_kind,
                .path = bun.default_allocator.dupe(u8, path_to_use) catch @panic("Failed to allocate path"),
            };

            break :brk build_output.toJS(globalObject);
        },
        .buffer => |buffer| brk: {
            var blob = jsc.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalObject);
            if (blob.store) |store| {
                store.mime_type = this.loader.toMimeType(&.{owned_pathname orelse ""});
                blob.content_type = store.mime_type.value;
            } else {
                blob.content_type = this.loader.toMimeType(&.{owned_pathname orelse ""}).value;
            }

            blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));

            var build_output = bun.default_allocator.create(jsc.API.BuildArtifact) catch @panic("Unable to allocate Artifact");
            build_output.* = jsc.API.BuildArtifact{
                .blob = blob,
                .hash = this.hash,
                .loader = this.input_loader,
                .output_kind = this.output_kind,
                .path = owned_pathname orelse bun.default_allocator.dupe(u8, this.src_path.text) catch unreachable,
            };

            this.value = .{
                .buffer = .{
                    .allocator = bun.default_allocator,
                    .bytes = &.{},
                },
            };

            break :brk build_output.toJS(globalObject);
        },
    };
}

pub fn toBlob(
    this: *OutputFile,
    allocator: std.mem.Allocator,
    globalThis: *jsc.JSGlobalObject,
) !jsc.WebCore.Blob {
    return switch (this.value) {
        .move, .pending => @panic("Unexpected pending output file"),
        .noop => @panic("Cannot convert noop output file to blob"),
        .copy => |copy| brk: {
            const file_blob = try jsc.WebCore.Blob.Store.initFile(
                if (copy.fd.isValid())
                    jsc.Node.PathOrFileDescriptor{
                        .fd = copy.fd,
                    }
                else
                    jsc.Node.PathOrFileDescriptor{
                        .path = jsc.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, copy.pathname) catch unreachable) },
                    },
                this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }),
                allocator,
            );

            this.value = .{
                .buffer = .{
                    .allocator = bun.default_allocator,
                    .bytes = &.{},
                },
            };

            break :brk jsc.WebCore.Blob.initWithStore(file_blob, globalThis);
        },
        .saved => brk: {
            const file_blob = try jsc.WebCore.Blob.Store.initFile(
                jsc.Node.PathOrFileDescriptor{
                    .path = jsc.Node.PathLike{ .string = bun.PathString.init(allocator.dupe(u8, this.src_path.text) catch unreachable) },
                },
                this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }),
                allocator,
            );

            this.value = .{
                .buffer = .{
                    .allocator = bun.default_allocator,
                    .bytes = &.{},
                },
            };

            break :brk jsc.WebCore.Blob.initWithStore(file_blob, globalThis);
        },
        .buffer => |buffer| brk: {
            var blob = jsc.WebCore.Blob.init(@constCast(buffer.bytes), buffer.allocator, globalThis);
            if (blob.store) |store| {
                store.mime_type = this.loader.toMimeType(&.{ this.dest_path, this.src_path.text });
                blob.content_type = store.mime_type.value;
            } else {
                blob.content_type = this.loader.toMimeType(&.{ this.dest_path, this.src_path.text }).value;
            }

            this.value = .{
                .buffer = .{
                    .allocator = bun.default_allocator,
                    .bytes = &.{},
                },
            };

            blob.size = @as(jsc.WebCore.Blob.SizeType, @truncate(buffer.bytes.len));
            break :brk blob;
        },
    };
}

const string = []const u8;

const resolve_path = @import("./resolver/resolve_path.zig");
const resolver = @import("./resolver/resolver.zig");
const std = @import("std");
const Loader = @import("./options.zig").Loader;

const bun = @import("bun");
const Environment = bun.Environment;
const FileDescriptorType = bun.FileDescriptor;
const Fs = bun.fs;
const jsc = bun.jsc;
const Output = bun.Global.Output;
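For orientation, here is a minimal usage sketch. It is not part of the file above and not taken from bun's bundler: it reuses the file's own imports (`std`, `bun`), the `Loader` tags and path literals are illustrative, and it only shows how `Options` maps onto an in-memory `.buffer` artifact that `writeToDisk` then flushes relative to the output directory.

```
// Hypothetical caller, for illustration only.
fn writeExampleChunk(code: []const u8) !void {
    // Assume ./out already exists and is the output directory.
    var out_dir = try std.fs.cwd().openDir("out", .{});
    defer out_dir.close();

    const file = OutputFile.init(.{
        .loader = .js, // assumed Loader tags
        .input_loader = .ts,
        .input_path = "src/entry.ts",
        .output_path = "out/chunk.js",
        .output_kind = .chunk,
        .is_executable = false,
        .side = null,
        .entry_point_index = null,
        .data = .{ .buffer = .{ .allocator = bun.default_allocator, .data = code } },
    });

    // For a .buffer value, writeToDisk turns dest_path ("out/chunk.js") into a
    // path relative to root_dir_path ("out") and writes the bytes via NodeFS.
    try file.writeToDisk(out_dir, "out");
    // deinit() is skipped here: it frees src_path/dest_path/bytes with
    // bun.default_allocator, and these literals and `code` were not allocated
    // from it.
}
```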