Fixes ENG-21287
Build times, measured with `bun run build && echo '//' >> src/main.zig && time bun run build`:
|Platform|0.14.1|0.15.2|Speedup|
|-|-|-|-|
|macos debug asan|126.90s|106.27s|1.19x|
|macos debug noasan|60.62s|50.85s|1.19x|
|linux debug asan|292.77s|241.45s|1.21x|
|linux debug noasan|146.58s|130.94s|1.12x|
|linux debug use_llvm=false|n/a|78.27s|1.87x|
|windows debug asan|177.13s|142.55s|1.24x|
Runtime performance:
- `next build` memory usage may have gone up by about 5%; otherwise runtime performance appears unchanged. Some code that uses writers may have gotten slower, in particular one counting writer and a few unbuffered writers that now go through vtable dispatch (see the sketch after this list).
- Binary size is reduced by roughly 800 KB (from 100.2 MB to 99.4 MB).
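For context on the writer note above: Zig 0.15 moves writers from comptime-generic parameters to a concrete, vtable-dispatched interface, so thin wrappers such as a counting writer now pay an indirect call per write. Below is a minimal, hypothetical sketch of the two shapes using toy types of our own; it is not Bun's code and not the exact `std.Io.Writer` API.

```zig
const std = @import("std");

/// Old shape: the inner writer is a comptime type parameter, so the call in
/// `writeAll` is monomorphized per writer type and can be inlined.
fn CountingWriter(comptime Inner: type) type {
    return struct {
        inner: Inner,
        bytes_written: usize = 0,

        fn writeAll(self: *@This(), bytes: []const u8) !void {
            try self.inner.writeAll(bytes); // direct call, resolved at compile time
            self.bytes_written += bytes.len;
        }
    };
}

/// New shape: one concrete interface that dispatches through a function
/// pointer, so every write is an indirect call the optimizer may not be
/// able to devirtualize.
const AnyWriter = struct {
    ctx: *anyopaque,
    writeAllFn: *const fn (ctx: *anyopaque, bytes: []const u8) anyerror!void,

    fn writeAll(self: AnyWriter, bytes: []const u8) anyerror!void {
        return self.writeAllFn(self.ctx, bytes); // vtable hop on every call
    }
};

/// The counting wrapper now sits on top of the runtime interface.
const CountingAnyWriter = struct {
    inner: AnyWriter,
    bytes_written: usize = 0,

    fn writeAll(self: *CountingAnyWriter, bytes: []const u8) anyerror!void {
        try self.inner.writeAll(bytes);
        self.bytes_written += bytes.len;
    }
};

/// A trivial sink used by the test below.
const NullSink = struct {
    discarded: usize = 0,

    fn writeAll(self: *NullSink, bytes: []const u8) !void {
        self.discarded += bytes.len;
    }

    fn writeAllOpaque(ctx: *anyopaque, bytes: []const u8) anyerror!void {
        const self: *NullSink = @ptrCast(@alignCast(ctx));
        return self.writeAll(bytes);
    }
};

test "both shapes count the same number of bytes" {
    var sink = NullSink{};

    var generic = CountingWriter(*NullSink){ .inner = &sink };
    try generic.writeAll("hello");

    var dynamic = CountingAnyWriter{ .inner = .{ .ctx = &sink, .writeAllFn = NullSink.writeAllOpaque } };
    try dynamic.writeAll("hello");

    try std.testing.expectEqual(generic.bytes_written, dynamic.bytes_written);
}
```

Both shapes count the same bytes; the difference is only whether the inner write is a direct, inlinable call or an indirect one, which is where the small regression for counting/unbuffered writers comes from.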
Improvements:
- `@export` hack is no longer needed for watch
- The native x86_64 backend makes Linux builds faster. To use it, set `use_llvm` to false and `no_link_obj` to false (see the build sketch after this list). Also set `ASAN_OPTIONS=detect_leaks=0`, otherwise the output is spammed with tens of thousands of lines of debug-info errors. Debugging may require the Zig fork of LLDB.
- `zig test-obj`, which we will be able to use for Zig unit tests.
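For the native-backend bullet above, here is a hedged sketch of how a `use_llvm` option is typically threaded through a `build.zig`. The option names come from the notes above, but the wiring shown here (and the `no_link_obj` handling, which is omitted) is assumed rather than taken from Bun's actual build, so treat it as illustrative only.

```zig
const std = @import("std");

pub fn build(b: *std.Build) void {
    const target = b.standardTargetOptions(.{});
    const optimize = b.standardOptimizeOption(.{});

    // Hypothetical option declaration; Bun's build.zig may spell this differently.
    const use_llvm = b.option(bool, "use_llvm", "Use the LLVM backend (false selects the self-hosted x86_64 backend)") orelse true;

    const exe = b.addExecutable(.{
        .name = "demo",
        .root_module = b.createModule(.{
            .root_source_file = b.path("src/main.zig"),
            .target = target,
            .optimize = optimize,
        }),
    });

    // With use_llvm=false on x86_64 Linux, the compiler's native backend is used,
    // which is what makes the debug builds in the table above so much faster.
    exe.use_llvm = use_llvm;

    b.installArtifact(exe);
}
```

With a setup like this the invocation would look something like `ASAN_OPTIONS=detect_leaks=0 zig build -Duse_llvm=false`; Bun's actual `bun run build` wrapper may pass these options differently.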
Still an issue:
- false 'dependency loop' errors remain in watch mode
- watch mode crashes observed
Follow-up:
- [ ] search for `comptime Writer: type` and `comptime W: type` and remove the remaining occurrences
- [ ] remove format_mode in our zig fork
- [ ] remove deprecated.zig autoFormatLabelFallback
- [ ] remove deprecated.zig autoFormatLabel
- [ ] remove deprecated.BufferedWriter and BufferedReader
- [ ] remove override_no_export_cpp_apis as it is no longer needed
- [ ] css `Parser(W)` -> `Parser`, and remove all the `comptime Writer: type` params (see the sketch after this list)
- [ ] remove deprecated writer fully
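For the css `Parser(W)` item, the shape of that refactor looks roughly like the following. This is a simplified, hypothetical example rather than the actual css printer code; it assumes the non-generic `std.Io.Writer` interface introduced in Zig 0.15.

```zig
const std = @import("std");

// Before: the writer type is a comptime parameter, so every distinct writer
// type instantiates its own copy of the parser/printer.
pub fn GenericParser(comptime W: type) type {
    return struct {
        writer: W,

        pub fn emit(self: *@This(), css: []const u8) !void {
            try self.writer.writeAll(css);
        }
    };
}

// After: a single concrete type that holds the runtime writer interface.
// Any *std.Io.Writer works (fixed buffer, file, counting wrapper, ...).
pub const Parser = struct {
    writer: *std.Io.Writer,

    pub fn emit(self: *Parser, css: []const u8) !void {
        try self.writer.writeAll(css);
    }
};

test "Parser emits through the vtable writer" {
    // Assumes the Zig 0.15 std.Io.Writer API (Writer.fixed / buffered).
    var buf: [64]u8 = undefined;
    var w = std.Io.Writer.fixed(&buf);

    var parser = Parser{ .writer = &w };
    try parser.emit(".a{color:red}");

    try std.testing.expectEqualStrings(".a{color:red}", w.buffered());
}
```

The payoff is one compiled copy of the parser instead of one per writer type, at the cost of the per-call indirection discussed earlier.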
Files with a net increase in lines:
```
649 src/deprecated.zig
167 scripts/pack-codegen-for-zig-team.ts
54 scripts/cleartrace-impl.js
46 scripts/cleartrace.ts
43 src/windows.zig
18 src/fs.zig
17 src/bun.js/ConsoleObject.zig
16 src/output.zig
12 src/bun.js/test/debug.zig
12 src/bun.js/node/node_fs.zig
8 src/env_loader.zig
7 src/css/printer.zig
7 src/cli/init_command.zig
7 src/bun.js/node.zig
6 src/string/escapeRegExp.zig
6 src/install/PnpmMatcher.zig
5 src/bun.js/webcore/Blob.zig
4 src/crash_handler.zig
4 src/bun.zig
3 src/install/lockfile/bun.lock.zig
3 src/cli/update_interactive_command.zig
3 src/cli/pack_command.zig
3 build.zig
2 src/Progress.zig
2 src/install/lockfile/lockfile_json_stringify_for_debugging.zig
2 src/css/small_list.zig
2 src/bun.js/webcore/prompt.zig
1 test/internal/ban-words.test.ts
1 test/internal/ban-limits.json
1 src/watcher/WatcherTrace.zig
1 src/transpiler.zig
1 src/shell/builtin/cp.zig
1 src/js_printer.zig
1 src/io/PipeReader.zig
1 src/install/bin.zig
1 src/css/selectors/selector.zig
1 src/cli/run_command.zig
1 src/bun.js/RuntimeTranspilerStore.zig
1 src/bun.js/bindings/JSRef.zig
1 src/bake/DevServer.zig
```
Files with a net decrease in lines:
```
-1 src/test/recover.zig
-1 src/sql/postgres/SocketMonitor.zig
-1 src/sql/mysql/MySQLRequestQueue.zig
-1 src/sourcemap/CodeCoverage.zig
-1 src/css/values/color_js.zig
-1 src/compile_target.zig
-1 src/bundler/linker_context/convertStmtsForChunk.zig
-1 src/bundler/bundle_v2.zig
-1 src/bun.js/webcore/blob/read_file.zig
-1 src/ast/base.zig
-2 src/sql/postgres/protocol/ArrayList.zig
-2 src/shell/builtin/mkdir.zig
-2 src/install/PackageManager/patchPackage.zig
-2 src/install/PackageManager/PackageManagerDirectories.zig
-2 src/fmt.zig
-2 src/css/declaration.zig
-2 src/css/css_parser.zig
-2 src/collections/baby_list.zig
-2 src/bun.js/bindings/ZigStackFrame.zig
-2 src/ast/E.zig
-3 src/StandaloneModuleGraph.zig
-3 src/deps/picohttp.zig
-3 src/deps/libuv.zig
-3 src/btjs.zig
-4 src/threading/Futex.zig
-4 src/shell/builtin/touch.zig
-4 src/meta.zig
-4 src/install/lockfile.zig
-4 src/css/selectors/parser.zig
-5 src/shell/interpreter.zig
-5 src/css/error.zig
-5 src/bun.js/web_worker.zig
-5 src/bun.js.zig
-6 src/cli/test_command.zig
-6 src/bun.js/VirtualMachine.zig
-6 src/bun.js/uuid.zig
-6 src/bun.js/bindings/JSValue.zig
-9 src/bun.js/test/pretty_format.zig
-9 src/bun.js/api/BunObject.zig
-14 src/install/install_binding.zig
-14 src/fd.zig
-14 src/bun.js/node/path.zig
-14 scripts/pack-codegen-for-zig-team.sh
-17 src/bun.js/test/diff_format.zig
```
Net line counts generated with `git diff --numstat origin/main...HEAD | awk '{ print ($1-$2)"\t"$3 }' | sort -rn`.
---------
Co-authored-by: autofix-ci[bot] <114827586+autofix-ci[bot]@users.noreply.github.com>
Co-authored-by: Dylan Conway <dylan.conway567@gmail.com>
Co-authored-by: Meghan Denny <meghan@bun.com>
Co-authored-by: taylor.fish <contact@taylor.fish>
```zig
pub const Set = struct {
    js: JavaScript,
    fs: Fs,
    json: Json,

    pub fn init(allocator: std.mem.Allocator) Set {
        return Set{
            .js = JavaScript.init(allocator),
            .fs = Fs{
                .shared_buffer = MutableString.init(allocator, 0) catch unreachable,
                .macro_shared_buffer = MutableString.init(allocator, 0) catch unreachable,
            },
            .json = Json{},
        };
    }
};

const debug = Output.scoped(.fs, .visible);

pub const Fs = struct {
    pub const Entry = struct {
        contents: string,
        fd: StoredFileDescriptorType,
        /// When `contents` comes from a native plugin, this field is populated
        /// with information on how to free it.
        external_free_function: ExternalFreeFunction = .none,

        pub const ExternalFreeFunction = struct {
            ctx: ?*anyopaque,
            function: ?*const fn (?*anyopaque) callconv(.c) void,

            pub const none: ExternalFreeFunction = .{ .ctx = null, .function = null };

            pub fn call(this: *const @This()) void {
                if (this.function) |func| {
                    func(this.ctx);
                }
            }
        };

        pub fn deinit(entry: *Entry, allocator: std.mem.Allocator) void {
            if (entry.external_free_function.function) |func| {
                func(entry.external_free_function.ctx);
            } else if (entry.contents.len > 0) {
                allocator.free(entry.contents);
                entry.contents = "";
            }
        }

        pub fn closeFD(entry: *Entry) ?bun.sys.Error {
            if (entry.fd.isValid()) {
                defer entry.fd = .invalid;
                return entry.fd.closeAllowingBadFileDescriptor(@returnAddress());
            }
            return null;
        }
    };

    shared_buffer: MutableString,
    macro_shared_buffer: MutableString,

    use_alternate_source_cache: bool = false,
    stream: bool = false,

    // When we are in a macro, the shared buffer may be in use by the in-progress macro.
    // so we have to dynamically switch it out.
    pub inline fn sharedBuffer(this: *Fs) *MutableString {
        return if (!this.use_alternate_source_cache)
            &this.shared_buffer
        else
            &this.macro_shared_buffer;
    }

    /// When we need to suspend/resume something that has pointers into the shared buffer, we need to
    /// switch out the shared buffer so that it is not in use
    /// The caller must
    pub fn resetSharedBuffer(this: *Fs, buffer: *MutableString) void {
        if (buffer == &this.shared_buffer) {
            this.shared_buffer = MutableString.initEmpty(bun.default_allocator);
        } else if (buffer == &this.macro_shared_buffer) {
            this.macro_shared_buffer = MutableString.initEmpty(bun.default_allocator);
        } else {
            bun.unreachablePanic("resetSharedBuffer: invalid buffer", .{});
        }
    }

    pub fn deinit(c: *Fs) void {
        var iter = c.entries.iterator();
        while (iter.next()) |entry| {
            entry.value.deinit(c.entries.allocator);
        }
        c.entries.deinit();
    }

    pub fn readFileShared(
        this: *Fs,
        _fs: *fs.FileSystem,
        path: [:0]const u8,
        cached_file_descriptor: ?StoredFileDescriptorType,
        shared: *MutableString,
    ) !Entry {
        var rfs = _fs.fs;

        const file_handle: std.fs.File = if (cached_file_descriptor) |fd| handle: {
            const handle = std.fs.File{ .handle = fd };
            try handle.seekTo(0);
            break :handle handle;
        } else try std.fs.openFileAbsoluteZ(path, .{ .mode = .read_only });

        defer {
            if (rfs.needToCloseFiles() and cached_file_descriptor == null) {
                file_handle.close();
            }
        }

        const file = if (this.stream)
            rfs.readFileWithHandle(path, null, file_handle, true, shared, true) catch |err| {
                if (comptime Environment.isDebug) {
                    Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
                }
                return err;
            }
        else
            rfs.readFileWithHandle(path, null, file_handle, true, shared, false) catch |err| {
                if (comptime Environment.isDebug) {
                    Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
                }
                return err;
            };

        return Entry{
            .contents = file.contents,
            .fd = if (FeatureFlags.store_file_descriptors) file_handle.handle else 0,
        };
    }

    pub fn readFile(
        c: *Fs,
        _fs: *fs.FileSystem,
        path: string,
        dirname_fd: StoredFileDescriptorType,
        comptime use_shared_buffer: bool,
        _file_handle: ?StoredFileDescriptorType,
    ) !Entry {
        return c.readFileWithAllocator(bun.default_allocator, _fs, path, dirname_fd, use_shared_buffer, _file_handle);
    }

    pub fn readFileWithAllocator(
        c: *Fs,
        allocator: std.mem.Allocator,
        _fs: *fs.FileSystem,
        path: string,
        dirname_fd: StoredFileDescriptorType,
        comptime use_shared_buffer: bool,
        _file_handle: ?StoredFileDescriptorType,
    ) !Entry {
        var rfs = _fs.fs;

        var file_handle: std.fs.File = if (_file_handle) |__file| __file.stdFile() else undefined;

        if (_file_handle == null) {
            if (FeatureFlags.store_file_descriptors and dirname_fd.isValid()) {
                file_handle = (bun.sys.openatA(dirname_fd, std.fs.path.basename(path), bun.O.RDONLY, 0).unwrap() catch |err| brk: {
                    switch (err) {
                        error.ENOENT => {
                            const handle = try bun.openFile(path, .{ .mode = .read_only });
                            Output.prettyErrorln(
                                "<r><d>Internal error: directory mismatch for directory \"{s}\", fd {f}<r>. You don't need to do anything, but this indicates a bug.",
                                .{ path, dirname_fd },
                            );
                            break :brk bun.FD.fromStdFile(handle);
                        },
                        else => return err,
                    }
                }).stdFile();
            } else {
                file_handle = try bun.openFile(path, .{ .mode = .read_only });
            }
        } else {
            try file_handle.seekTo(0);
        }

        if (comptime !Environment.isWindows) // skip on Windows because NTCreateFile will do it.
            debug("openat({f}, {s}) = {f}", .{ dirname_fd, path, bun.FD.fromStdFile(file_handle) });

        const will_close = rfs.needToCloseFiles() and _file_handle == null;
        defer {
            if (will_close) {
                debug("readFileWithAllocator close({f})", .{bun.fs.printHandle(file_handle.handle)});
                file_handle.close();
            }
        }

        const file = if (c.stream)
            rfs.readFileWithHandleAndAllocator(allocator, path, null, file_handle, use_shared_buffer, c.sharedBuffer(), true) catch |err| {
                if (Environment.isDebug) {
                    Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
                }
                return err;
            }
        else
            rfs.readFileWithHandleAndAllocator(allocator, path, null, file_handle, use_shared_buffer, c.sharedBuffer(), false) catch |err| {
                if (Environment.isDebug) {
                    Output.printError("{s}: readFile error -- {s}", .{ path, @errorName(err) });
                }
                return err;
            };

        return Entry{
            .contents = file.contents,
            .fd = if (FeatureFlags.store_file_descriptors and !will_close) .fromStdFile(file_handle) else bun.invalid_fd,
        };
    }
};

pub const Css = struct {
    pub const Entry = struct {};
    pub const Result = struct {
        ok: bool,
        value: void,
    };
    pub fn parse(_: *@This(), _: *logger.Log, _: logger.Source) !Result {
        Global.notimpl();
    }
};

pub const JavaScript = struct {
    pub const Result = js_ast.Result;

    pub fn init(_: std.mem.Allocator) JavaScript {
        return JavaScript{};
    }
    // For now, we're not going to cache JavaScript ASTs.
    // It's probably only relevant when bundling for production.
    pub fn parse(
        _: *const @This(),
        allocator: std.mem.Allocator,
        opts: js_parser.Parser.Options,
        defines: *Define,
        log: *logger.Log,
        source: *const logger.Source,
    ) anyerror!?js_ast.Result {
        var temp_log = logger.Log.init(allocator);
        temp_log.level = log.level;
        var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch {
            temp_log.appendToMaybeRecycled(log, source) catch {};
            return null;
        };

        const result = parser.parse() catch |err| {
            if (temp_log.errors == 0) {
                log.addRangeError(source, parser.lexer.range(), @errorName(err)) catch unreachable;
            }

            temp_log.appendToMaybeRecycled(log, source) catch {};
            return null;
        };

        temp_log.appendToMaybeRecycled(log, source) catch {};
        return result;
    }

    pub fn scan(
        _: *@This(),
        allocator: std.mem.Allocator,
        scan_pass_result: *js_parser.ScanPassResult,
        opts: js_parser.Parser.Options,
        defines: *Define,
        log: *logger.Log,
        source: *const logger.Source,
    ) anyerror!void {
        if (strings.trim(source.contents, "\n\t\r ").len == 0) {
            return;
        }

        var temp_log = logger.Log.init(allocator);
        defer temp_log.appendToMaybeRecycled(log, source) catch {};

        var parser = js_parser.Parser.init(opts, &temp_log, source, defines, allocator) catch return;

        return try parser.scanImports(scan_pass_result);
    }
};

pub const Json = struct {
    pub fn init(_: std.mem.Allocator) Json {
        return Json{};
    }
    fn parse(_: *@This(), log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator, comptime func: anytype, comptime force_utf8: bool) anyerror!?js_ast.Expr {
        var temp_log = logger.Log.init(allocator);
        defer {
            temp_log.appendToMaybeRecycled(log, source) catch {};
        }
        return func(source, &temp_log, allocator, force_utf8) catch handler: {
            break :handler null;
        };
    }
    pub fn parseJSON(cache: *@This(), log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator, mode: enum { json, jsonc }, comptime force_utf8: bool) anyerror!?js_ast.Expr {
        // tsconfig.* and jsconfig.* files are JSON files, but they are not valid JSON files.
        // They are JSON files with comments and trailing commas.
        // Sometimes tooling expects this to work.
        if (mode == .jsonc) {
            return try parse(cache, log, source, allocator, json_parser.parseTSConfig, force_utf8);
        }

        return try parse(cache, log, source, allocator, json_parser.parse, force_utf8);
    }

    pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator, comptime force_utf8: bool) anyerror!?js_ast.Expr {
        return try parse(cache, log, source, allocator, json_parser.parseTSConfig, force_utf8);
    }

    pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: *const logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr {
        return try parse(cache, log, source, allocator, json_parser.parseTSConfig, true);
    }
};

const string = []const u8;

const fs = @import("./fs.zig");
const std = @import("std");
const Define = @import("./defines.zig").Define;

const bun = @import("bun");
const Environment = bun.Environment;
const FeatureFlags = bun.FeatureFlags;
const Global = bun.Global;
const MutableString = bun.MutableString;
const Output = bun.Output;
const StoredFileDescriptorType = bun.StoredFileDescriptorType;
const default_allocator = bun.default_allocator;
const js_ast = bun.ast;
const js_parser = bun.js_parser;
const json_parser = bun.json;
const logger = bun.logger;
const strings = bun.strings;
```